Cleaned up old files, added some mysteriously delayed pydantic models, and submitted to pypi to keep the package name.

This commit is contained in:
sneakers-the-rat 2023-08-25 00:52:12 -07:00
parent 4faaa8efe8
commit 63c6cef10b
17 changed files with 6127 additions and 67 deletions

View file

@@ -2,3 +2,5 @@
Translating NWB schema language to linkml
(very WIP dont @ me)
Just submitting to pypi to squat the package name

View file

@@ -1,2 +0,0 @@

View file

@@ -1,4 +0,0 @@
NWB_SCHEMA_REPO = "https://github.com/NeurodataWithoutBorders/nwb-schema/"
JSON_SCHEMA = "nwb.schema.json"
SCHEMA_DIR = 'core/'
ROOT_NAMESPACE = 'core/nwb.namespace.yaml'

View file

@@ -1,3 +0,0 @@
from linkml.generators.linkmlgen import LinkmlGenerator
from linkml_runtime.linkml_model import SchemaDefinition
from linkml_runtime.dumpers import yaml_dumper

View file

@@ -1,29 +0,0 @@
"""
Practice translation of namespaces to linkml just as a warmup to
see what kind of operations we'll need
Notes:
- Handling "namespace" imports within namespaces separately as prefixes
"""
from typing import List
from dataclasses import dataclass
from nwb_schema_language import Namespace
from linkml.utils.schema_builder import SchemaBuilder
from linkml_runtime.linkml_model import SchemaDefinition
# top-level attributes
def build_schema(namespace: Namespace) -> SchemaDefinition:
return SchemaDefinition(
id=namespace.name,
name=namespace.name,
title=namespace.full_name,
version=namespace.version,
imports=[schema.source for schema in namespace.schema_ if schema.source is not None]
)
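# Hypothetical usage sketch (added for illustration, not in the original file):
# the Namespace/Schema constructor shapes below are inferred from build_schema
# above and are assumptions, not confirmed API.
from nwb_schema_language import Schema
from linkml_runtime.dumpers import yaml_dumper

ns = Namespace(
    name="core",
    full_name="NWB core",
    version="2.6.0",
    schema_=[Schema(source="nwb.base.yaml")],
)
print(yaml_dumper.dumps(build_schema(ns)))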

View file

@@ -1,5 +1,5 @@
"""
Maps to change the loaded .yaml from nwb schema before it's
Maps to change the loaded .yaml from nwb schema before it's given to the nwb_schema_language models
"""
from nwb_linkml.map import KeyMap, SCOPE_TYPES, PHASES

View file

@@ -1,13 +0,0 @@
"""
Draft of a source to target mapping in pseudo-markup
"""
namespace_map = {
"source": "Namespace",
"outputs": [
{}
],
"maps": [
]
}

View file

@@ -0,0 +1,411 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
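# Illustrative sketch (not part of the generated file) of what this pydantic v1
# config buys: extra='forbid' rejects unknown fields, and validate_assignment
# re-validates attributes on assignment. The Probe model here is hypothetical.
class Probe(ConfiguredBaseModel):
    n_channels: int

try:
    Probe(n_channels=32, nonsense=True)  # extra='forbid' -> ValidationError
except Exception as e:
    print(type(e).__name__)
probe = Probe(n_channels=32)
try:
    probe.n_channels = "not a number"  # validate_assignment -> ValidationError
except Exception as e:
    print(type(e).__name__)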
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
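# Because FlatDType subclasses str, members compare equal to their string
# values, which pairs with use_enum_values in the config above (illustrative
# sketch, not part of the generated file):
assert FlatDType.float32 == "float32"
assert FlatDType("int8") is FlatDType.int8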
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
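# Worked example of the conversion/offset arithmetic described above, using the
# docstring's numbers (illustrative sketch, not part of the generated file):
raw = 16384                        # example int16 sample from the acquisition system
conversion = 2.5 / 32768 / 8000    # ~9.5367e-9 volts per raw unit
offset = 0.0
volts = raw * conversion + offset  # ~1.5625e-4 V, now in the declared 'unit'
print(f"{volts:.4e} V")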
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
- Each slot within a subclass indicates a possible dimension.
- Only dimensions that are present in all the dimension specifiers in the original schema are required.
- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
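# Ragged-array sketch (illustration, not part of the generated file): a
# VectorIndex holds the end offset of each row in its target VectorData, so
# row i spans target[index[i-1]:index[i]], per the docstring above.
flat = ["a", "b", "c", "d", "e"]   # stand-in for the target VectorData values
index = [2, 3, 5]                  # stand-in for the VectorIndex values
rows = [flat[start:end] for start, end in zip([0] + index[:-1], index)]
assert rows == [["a", "b"], ["c"], ["d", "e"]]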
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
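# Region-pointer sketch (illustration, not part of the generated file): a
# DynamicTableRegion's data is 0-indexed row numbers into the target table.
target_rows = ["tetrode0", "tetrode1", "tetrode2", "tetrode3"]
region = [1, 3]                    # rows of the target that this region selects
assert [target_rows[i] for i in region] == ["tetrode1", "tetrode3"]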
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
TimeSeriesData.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesSync.update_forward_refs()
Arraylike.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
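# Illustrative sketch (not part of the generated file): once forward references
# are resolved, models that point at later-defined classes (e.g. TimeSeriesData
# -> TimeSeriesDataArray) can be instantiated; the values here are made up.
ts = TimeSeries(
    data=TimeSeriesData(unit="volts"),
    starting_time=TimeSeriesStartingTime(rate=30000.0, unit="seconds"),
)
print(ts.data.unit)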

View file

@@ -0,0 +1,785 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class ElectricalSeriesData(ConfiguredBaseModel):
"""
Recorded voltage data.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""")
array: Optional[ElectricalSeriesDataArray] = Field(None)
class ElectricalSeriesChannelConversion(ConfiguredBaseModel):
"""
Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.
"""
axis: Optional[int] = Field(None, description="""The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1.""")
array: Optional[ElectricalSeriesChannelConversionArray] = Field(None)
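# Worked example of the two-stage scaling described above (illustration, not
# part of the generated file): volts = raw * data.conversion * channel_conversion,
# applied along the channel axis (axis=1).
raw = [100, 100]                   # one time point across two channels
conversion = 1e-6                  # global 'conversion' from the data attribute
channel_conversion = [1.0, 2.0]    # per-channel factors
volts = [r * conversion * c for r, c in zip(raw, channel_conversion)]
print(volts)                       # ~[1e-04, 2e-04]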
class SpikeEventSeriesData(ConfiguredBaseModel):
"""
Spike waveforms.
"""
unit: Optional[str] = Field(None, description="""Unit of measurement for waveforms, which is fixed to 'volts'.""")
array: Optional[SpikeEventSeriesDataArray] = Field(None)
class SpikeEventSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[SpikeEventSeriesTimestampsArray] = Field(None)
class FeatureExtractionDescription(ConfiguredBaseModel):
"""
Description of features (e.g., 'PC1') for each of the extracted features.
"""
array: Optional[FeatureExtractionDescriptionArray] = Field(None)
class FeatureExtractionFeatures(ConfiguredBaseModel):
"""
Multi-dimensional array of features extracted from each event.
"""
array: Optional[FeatureExtractionFeaturesArray] = Field(None)
class FeatureExtractionTimes(ConfiguredBaseModel):
"""
Times of events that features correspond to (can be a link).
"""
array: Optional[FeatureExtractionTimesArray] = Field(None)
class EventDetectionDetectionMethod(ConfiguredBaseModel):
"""
Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.
"""
None
class EventDetectionSourceIdx(ConfiguredBaseModel):
"""
Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. 'description' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.
"""
array: Optional[EventDetectionSourceIdxArray] = Field(None)
class EventDetectionTimes(ConfiguredBaseModel):
"""
Timestamps of events, in seconds.
"""
unit: Optional[str] = Field(None, description="""Unit of measurement for event times, which is fixed to 'seconds'.""")
array: Optional[EventDetectionTimesArray] = Field(None)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
None
class ClusterWaveformsWaveformFiltering(ConfiguredBaseModel):
"""
Filtering applied to data before generating mean/sd
"""
None
class ClusterWaveformsWaveformMean(ConfiguredBaseModel):
"""
The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e., cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero-filled)
"""
array: Optional[ClusterWaveformsWaveformMeanArray] = Field(None)
class ClusterWaveformsWaveformSd(ConfiguredBaseModel):
"""
Stdev of waveforms for each cluster, using the same indices as in mean
"""
array: Optional[ClusterWaveformsWaveformSdArray] = Field(None)
class ClusteringDescription(ConfiguredBaseModel):
"""
Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)
"""
None
class ClusteringNum(ConfiguredBaseModel):
"""
Cluster number of each event
"""
array: Optional[ClusteringNumArray] = Field(None)
class ClusteringPeakOverRms(ConfiguredBaseModel):
"""
Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).
"""
array: Optional[ClusteringPeakOverRmsArray] = Field(None)
class ClusteringTimes(ConfiguredBaseModel):
"""
Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.
"""
array: Optional[ClusteringTimesArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
- Each slot within a subclass indicates a possible dimension.
- Only dimensions that are present in all the dimension specifiers in the original schema are required.
- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ElectricalSeriesDataArray(Arraylike):
num_times: float = Field(...)
num_channels: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class ElectricalSeriesChannelConversionArray(Arraylike):
num_channels: float = Field(...)
class SpikeEventSeriesDataArray(Arraylike):
num_events: float = Field(...)
num_samples: float = Field(...)
num_channels: Optional[float] = Field(None)
class SpikeEventSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class FeatureExtractionDescriptionArray(Arraylike):
num_features: str = Field(...)
class FeatureExtractionFeaturesArray(Arraylike):
num_events: Optional[float] = Field(None)
num_channels: Optional[float] = Field(None)
num_features: Optional[float] = Field(None)
class FeatureExtractionTimesArray(Arraylike):
num_events: float = Field(...)
class EventDetectionSourceIdxArray(Arraylike):
num_events: int = Field(...)
class EventDetectionTimesArray(Arraylike):
num_events: float = Field(...)
class ClusterWaveformsWaveformMeanArray(Arraylike):
num_clusters: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class ClusterWaveformsWaveformSdArray(Arraylike):
num_clusters: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class ClusteringNumArray(Arraylike):
num_events: int = Field(...)
class ClusteringPeakOverRmsArray(Arraylike):
num_clusters: float = Field(...)
class ClusteringTimesArray(Arraylike):
num_events: float = Field(...)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class ElectricalSeriesElectrodes(DynamicTableRegion):
"""
DynamicTableRegion pointer to the electrodes that this time series was generated from.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class FeatureExtractionElectrodes(DynamicTableRegion):
"""
DynamicTableRegion pointer to the electrodes that this time series was generated from.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class ElectrodeGroup(NWBContainer):
"""
A physical grouping of electrodes, e.g. a shank of an array.
"""
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
location: Optional[str] = Field(None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""")
position: Optional[ElectrodeGroupPosition] = Field(None, description="""stereotaxic or common framework coordinates""")
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
"""
description: FeatureExtractionDescription = Field(..., description="""Description of features (e.g., 'PC1') for each of the extracted features.""")
features: FeatureExtractionFeatures = Field(..., description="""Multi-dimensional array of features extracted from each event.""")
times: FeatureExtractionTimes = Field(..., description="""Times of events that features correspond to (can be a link).""")
electrodes: FeatureExtractionElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
class EventDetection(NWBDataInterface):
"""
Detected spike events from voltage trace(s).
"""
detection_method: EventDetectionDetectionMethod = Field(..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""")
source_idx: EventDetectionSourceIdx = Field(..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. 'description' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""")
times: EventDetectionTimes = Field(..., description="""Timestamps of events, in seconds.""")
class EventWaveform(NWBDataInterface):
"""
Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition.
"""
SpikeEventSeries: Optional[List[SpikeEventSeries]] = Field(default_factory=list, description="""SpikeEventSeries object(s) containing detected spike event waveforms.""")
class FilteredEphys(NWBDataInterface):
"""
Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each series' TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
"""
ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing filtered electrophysiology data.""")
class LFP(NWBDataInterface):
"""
LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
"""
ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing LFP data for one or more channels.""")
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
waveform_filtering: ClusterWaveformsWaveformFiltering = Field(..., description="""Filtering applied to data before generating mean/sd""")
waveform_mean: ClusterWaveformsWaveformMean = Field(..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e., cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero-filled)""")
waveform_sd: ClusterWaveformsWaveformSd = Field(..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""")
class Clustering(NWBDataInterface):
"""
DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting.
"""
description: ClusteringDescription = Field(..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""")
num: ClusteringNum = Field(..., description="""Cluster number of each event""")
peak_over_rms: ClusteringPeakOverRms = Field(..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""")
times: ClusteringTimes = Field(..., description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""")
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ElectricalSeries(TimeSeries):
"""
A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels.
"""
filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
"""
data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: SpikeEventSeriesTimestamps = Field(..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""")
filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
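# Illustrative sketch (not part of the generated models): the channel_conversion
# docstring above defines data in volts = data * data.conversion *
# channel_conversion, with the per-channel factors broadcast along the channel
# axis. The numpy dependency and all values are assumptions for the example only.
def _example_channel_conversion():
    import numpy as np
    raw = np.array([[100, 200], [300, 400]], dtype=np.int16)  # [num_times, num_channels]
    conversion = 9.5367e-9                      # global factor from data.conversion
    channel_conversion = np.array([1.0, 2.0])   # one factor per channel
    # broadcast the per-channel factors across the time axis
    return raw * conversion * channel_conversion[np.newaxis, :]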
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
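# Illustrative sketch (not part of the generated models): per the 'conversion',
# 'offset', and 'unit' docstrings above, values in the specified unit are
# recovered as data * conversion + offset. The numbers reuse the int16 / 5 V
# range / 8000x gain example from the docstring.
def _example_unit_conversion():
    conversion = 2.5 / 32768 / 8000  # = 9.5367e-9 volts per raw count
    offset = 0.0                     # no re-centering needed for signed storage
    raw_value = 16384                # hypothetical int16 sample at half scale
    return raw_value * conversion + offset  # ~1.5625e-4 volts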
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
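# Illustrative sketch (not part of the generated models): when timestamps are
# uniformly spaced, the i-th timestamp follows from starting_time and rate as
# t_i = starting_time + i / rate.
def _example_timestamps_from_rate(starting_time: float, rate: float, n: int):
    return [starting_time + i / rate for i in range(n)]
# e.g. _example_timestamps_from_rate(0.0, 30000.0, 3) -> [0.0, ~3.33e-05, ~6.67e-05]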
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
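# Illustrative sketch (not part of the generated models): 'control' assigns an
# integer label to each time point and control_description[i] documents the
# meaning of label i, so control_description[0] describes time points where
# control == 0. The labels and descriptions here are hypothetical.
def _example_control_labels():
    control = [0, 0, 1, 0, 2]  # one integer label per time point
    control_description = ["baseline", "stimulus on", "stimulus off"]
    return [control_description[label] for label in control]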
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
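# Illustrative sketch (not part of the generated models): when images are
# referenced by index (e.g., from an IndexSeries), index i resolves through
# order_of_images to the i-th referenced Image.
def _example_resolve_image_index(order_of_images: list, i: int):
    # order_of_images mirrors the ordered dataset of Image references above
    return order_of_images[i]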
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
ElectricalSeriesData.update_forward_refs()
ElectricalSeriesChannelConversion.update_forward_refs()
SpikeEventSeriesData.update_forward_refs()
SpikeEventSeriesTimestamps.update_forward_refs()
FeatureExtractionDescription.update_forward_refs()
FeatureExtractionFeatures.update_forward_refs()
FeatureExtractionTimes.update_forward_refs()
EventDetectionDetectionMethod.update_forward_refs()
EventDetectionSourceIdx.update_forward_refs()
EventDetectionTimes.update_forward_refs()
ElectrodeGroupPosition.update_forward_refs()
ClusterWaveformsWaveformFiltering.update_forward_refs()
ClusterWaveformsWaveformMean.update_forward_refs()
ClusterWaveformsWaveformSd.update_forward_refs()
ClusteringDescription.update_forward_refs()
ClusteringNum.update_forward_refs()
ClusteringPeakOverRms.update_forward_refs()
ClusteringTimes.update_forward_refs()
Arraylike.update_forward_refs()
ElectricalSeriesDataArray.update_forward_refs()
ElectricalSeriesChannelConversionArray.update_forward_refs()
SpikeEventSeriesDataArray.update_forward_refs()
SpikeEventSeriesTimestampsArray.update_forward_refs()
FeatureExtractionDescriptionArray.update_forward_refs()
FeatureExtractionFeaturesArray.update_forward_refs()
FeatureExtractionTimesArray.update_forward_refs()
EventDetectionSourceIdxArray.update_forward_refs()
EventDetectionTimesArray.update_forward_refs()
ClusterWaveformsWaveformMeanArray.update_forward_refs()
ClusterWaveformsWaveformSdArray.update_forward_refs()
ClusteringNumArray.update_forward_refs()
ClusteringPeakOverRmsArray.update_forward_refs()
ClusteringTimesArray.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
ElectricalSeriesElectrodes.update_forward_refs()
FeatureExtractionElectrodes.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
NWBData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
Image.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferences.update_forward_refs()
ImageReferencesArray.update_forward_refs()
NWBContainer.update_forward_refs()
ElectrodeGroup.update_forward_refs()
NWBDataInterface.update_forward_refs()
FeatureExtraction.update_forward_refs()
EventDetection.update_forward_refs()
EventWaveform.update_forward_refs()
FilteredEphys.update_forward_refs()
LFP.update_forward_refs()
ClusterWaveforms.update_forward_refs()
Clustering.update_forward_refs()
TimeSeries.update_forward_refs()
ElectricalSeries.update_forward_refs()
SpikeEventSeries.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()

View file

@ -0,0 +1,568 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
"""
Phase response to stimulus on the first measured axis.
"""
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[ImagingRetinotopyAxis1PhaseMapArray] = Field(None)
class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
"""
Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
"""
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[ImagingRetinotopyAxis1PowerMapArray] = Field(None)
class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
"""
Phase response to stimulus on the second measured axis.
"""
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[ImagingRetinotopyAxis2PhaseMapArray] = Field(None)
class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
"""
Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
"""
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[ImagingRetinotopyAxis2PowerMapArray] = Field(None)
class ImagingRetinotopyAxisDescriptions(ConfiguredBaseModel):
"""
    Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or ['radius', 'theta'].
"""
array: Optional[ImagingRetinotopyAxisDescriptionsArray] = Field(None)
class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
"""
Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].
"""
bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""")
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
array: Optional[ImagingRetinotopyFocalDepthImageArray] = Field(None)
class ImagingRetinotopySignMap(ConfiguredBaseModel):
"""
Sine of the angle between the direction of the gradient in axis_1 and axis_2.
"""
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[ImagingRetinotopySignMapArray] = Field(None)
class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
"""
Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]
"""
bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""")
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
array: Optional[ImagingRetinotopyVasculatureImageArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
    - Each slot within a subclass indicates a possible dimension.
    - Only dimensions that are present in all the dimension specifiers in the original schema are required.
    - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ImagingRetinotopyAxis1PhaseMapArray(Arraylike):
num_rows: Optional[float] = Field(None)
num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis1PowerMapArray(Arraylike):
num_rows: Optional[float] = Field(None)
num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis2PhaseMapArray(Arraylike):
num_rows: Optional[float] = Field(None)
num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis2PowerMapArray(Arraylike):
num_rows: Optional[float] = Field(None)
num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxisDescriptionsArray(Arraylike):
axis_1_axis_2: str = Field(...)
class ImagingRetinotopyFocalDepthImageArray(Arraylike):
num_rows: Optional[int] = Field(None)
num_cols: Optional[int] = Field(None)
class ImagingRetinotopySignMapArray(Arraylike):
num_rows: Optional[float] = Field(None)
num_cols: Optional[float] = Field(None)
class ImagingRetinotopyVasculatureImageArray(Arraylike):
num_rows: Optional[int] = Field(None)
num_cols: Optional[int] = Field(None)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
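# Illustrative sketch (not part of the generated models): ragged-array slicing
# per the VectorData/VectorIndex docstrings above. Vector k spans
# data[index[k-1]:index[k]], with an implicit 0 before the first index.
def _example_ragged_vectors():
    data = ["a", "b", "c", "d", "e", "f"]  # flat VectorData values
    index = [2, 3, 6]                      # VectorIndex: cumulative end offsets
    starts = [0] + index[:-1]
    return [data[s:e] for s, e in zip(starts, index)]
    # -> [["a", "b"], ["c"], ["d", "e", "f"]]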
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
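# Illustrative sketch (not part of the generated models): a DynamicTableRegion
# stores 0-indexed row numbers into a target table, letting several rows share
# one metadata row without duplication. Values are hypothetical.
def _example_region_lookup():
    target_rows = ["electrode group A", "electrode group B"]  # rows of the target table
    region = [0, 0, 1, 1]  # four channels referencing two shared rows
    return [target_rows[i] for i in region]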
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class ImagingRetinotopy(NWBDataInterface):
"""
Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x).
"""
axis_1_phase_map: ImagingRetinotopyAxis1PhaseMap = Field(..., description="""Phase response to stimulus on the first measured axis.""")
axis_1_power_map: Optional[ImagingRetinotopyAxis1PowerMap] = Field(None, description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
axis_2_phase_map: ImagingRetinotopyAxis2PhaseMap = Field(..., description="""Phase response to stimulus on the second measured axis.""")
axis_2_power_map: Optional[ImagingRetinotopyAxis2PowerMap] = Field(None, description="""Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
    axis_descriptions: ImagingRetinotopyAxisDescriptions = Field(..., description="""Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or ['radius', 'theta'].""")
focal_depth_image: Optional[ImagingRetinotopyFocalDepthImage] = Field(None, description="""Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].""")
sign_map: Optional[ImagingRetinotopySignMap] = Field(None, description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""")
vasculature_image: ImagingRetinotopyVasculatureImage = Field(..., description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""")
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
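# Illustrative sketch (not part of the generated models): DynamicTable is a
# struct-of-arrays, so each column is a VectorData aligned on the first
# dimension. A minimal instantiation with the models above; field values are
# hypothetical.
def _example_dynamic_table():
    return DynamicTable(
        colnames="label",
        description="toy table with a single column",
        id=DynamicTableId(),
        VectorData=[VectorData(description="row labels")],
    )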
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
ImagingRetinotopyAxis1PhaseMap.update_forward_refs()
ImagingRetinotopyAxis1PowerMap.update_forward_refs()
ImagingRetinotopyAxis2PhaseMap.update_forward_refs()
ImagingRetinotopyAxis2PowerMap.update_forward_refs()
ImagingRetinotopyAxisDescriptions.update_forward_refs()
ImagingRetinotopyFocalDepthImage.update_forward_refs()
ImagingRetinotopySignMap.update_forward_refs()
ImagingRetinotopyVasculatureImage.update_forward_refs()
Arraylike.update_forward_refs()
ImagingRetinotopyAxis1PhaseMapArray.update_forward_refs()
ImagingRetinotopyAxis1PowerMapArray.update_forward_refs()
ImagingRetinotopyAxis2PhaseMapArray.update_forward_refs()
ImagingRetinotopyAxis2PowerMapArray.update_forward_refs()
ImagingRetinotopyAxisDescriptionsArray.update_forward_refs()
ImagingRetinotopyFocalDepthImageArray.update_forward_refs()
ImagingRetinotopySignMapArray.update_forward_refs()
ImagingRetinotopyVasculatureImageArray.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
ImagingRetinotopy.update_forward_refs()
TimeSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

nwb_linkml/models/core.py: new file, 3686 additions (diff suppressed because it is too large)

View file

@ -0,0 +1,117 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
    - Each slot within a subclass indicates a possible dimension.
    - Only dimensions that are present in all the dimension specifiers in the original schema are required.
    - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Data.update_forward_refs()
Container.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
Arraylike.update_forward_refs()

View file

@ -0,0 +1,219 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
    - Each slot within a subclass indicates a possible dimension.
    - Only dimensions that are present in all the dimension specifiers in the original schema are required.
    - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -0,0 +1,326 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "0.5.0"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
    - Each slot within a subclass indicates a possible dimension.
    - Only dimensions that are present in all the dimension specifiers in the original schema are required.
    - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class HERDKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDFilesArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntitiesArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectsArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntityKeysArray(Arraylike):
num_rows: Any = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class HERDKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
array: Optional[HERDKeysArray] = Field(None)
class HERDFiles(Data):
"""
A table for storing object ids of files used in external resources.
"""
array: Optional[HERDFilesArray] = Field(None)
class HERDEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
array: Optional[HERDEntitiesArray] = Field(None)
class HERDObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
array: Optional[HERDObjectsArray] = Field(None)
class HERDObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
array: Optional[HERDObjectKeysArray] = Field(None)
class HERDEntityKeys(Data):
"""
A table for identifying which keys use which entity.
"""
array: Optional[HERDEntityKeysArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class HERD(Container):
"""
HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files.
"""
    keys: HERDKeys = Field(..., description="""A table for storing user terms that are used to refer to external resources.""")
files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""")
entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""")
objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""")
object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""")
entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""")
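# Hedged sketch (editorial, kept as comments): all six tables are required,
# but each table's `array` payload is optional, so an empty HERD validates.
# herd = HERD(
#     keys=HERDKeys(), files=HERDFiles(), entities=HERDEntities(),
#     objects=HERDObjects(), object_keys=HERDObjectKeys(),
#     entity_keys=HERDEntityKeys(),
# )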
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
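# Hedged sketch (editorial) of the ragged-array convention described above:
# with index values [2, 3], row 0 of the table is target[0:2] and row 1 is
# target[2:3]. Names below are illustrative only.
# letters = VectorData(description="per-element letters")
# letters_index = VectorIndex(target=letters, description="row boundaries into letters")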
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
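# Hedged sketch (editorial): a region column pointing into another table; the
# row indices themselves live in the `array` payload. `other_table` is a
# hypothetical DynamicTable instance.
# region = DynamicTableRegion(
#     table=other_table,
#     description="rows of other_table relevant to each row here",
# )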
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
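# Hedged sketch (editorial): a minimal table; `id` is the only required field,
# and columns are supplied as VectorData objects.
# table = DynamicTable(
#     description="example table",
#     id=DynamicTableId(),
#     VectorData=[VectorData(description="a single column")],
# )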
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
HERDKeysArray.update_forward_refs()
HERDFilesArray.update_forward_refs()
HERDEntitiesArray.update_forward_refs()
HERDObjectsArray.update_forward_refs()
HERDObjectKeysArray.update_forward_refs()
HERDEntityKeysArray.update_forward_refs()
Data.update_forward_refs()
HERDKeys.update_forward_refs()
HERDFiles.update_forward_refs()
HERDEntities.update_forward_refs()
HERDObjects.update_forward_refs()
HERDObjectKeys.update_forward_refs()
HERDEntityKeys.update_forward_refs()
Container.update_forward_refs()
HERD.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
VectorData.update_forward_refs()
EnumData.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndex.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiers.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTable.update_forward_refs()
DynamicTableId.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
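# Hedged end-to-end sketch (editorial, kept as comments so importing this
# module stays side-effect free): round-tripping a model through pydantic v1
# serialization with .json() / .parse_raw().
# table = DynamicTable(description="demo", id=DynamicTableId())
# assert DynamicTable.parse_raw(table.json()).dict() == table.dict()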

View file

@ -1,5 +1,5 @@
[tool.poetry]
name = "nwb_linkml"
name = "nwb-linkml"
version = "0.1.0"
description = "Translating NWB schema language to LinkML"
authors = ["sneakers-the-rat <JLSaunders987@gmail.com>"]
@ -14,10 +14,20 @@ packages = [
python = "^3.11"
pyyaml = "^6.0"
linkml-runtime = "^1.5.6"
nwb_schema_language = { path = './nwb_schema_language', develop = true }
#nwb_schema_language = { path = './nwb_schema_language', develop = true, optional = true }
pydantic = "<2"
rich = "^13.5.2"
linkml = "^1.5.7"
pytest = { version="^7.4.0", optional=true}
pytest-depends = {version="^1.0.1", optional=true}
dash = {version="^2.12.1", optional=true}
dash-cytoscape = {version="^0.3.0", optional=true}
[tool.poetry.extras]
dev = ["nwb_schema_language"]
tests = ["pytest", "pytest-depends"]
plot = ["dash", "dash-cytoscape"]
[tool.poetry.group.tests]
optional = true

View file

@ -1,13 +0,0 @@
import pytest
from .fixtures import nwb_core_fixture, tmp_output_dir
from nwb_linkml.maps.namespace import build_schema
from linkml_runtime.dumpers import yaml_dumper
def test_namespace_to_linkml(nwb_core_fixture, tmp_output_dir):
output_file = tmp_output_dir / 'nwb.namespace.yml'
schema = build_schema(nwb_core_fixture['nwb-core']['namespace'].namespaces[0])
yaml_dumper.dump(schema, output_file)