diff --git a/README.md b/README.md
index fa80339..67678d3 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,4 @@
-# translate-nwb
+# nwb-linkml
 Translating NWB schema language to linkml
+
+(very WIP, don't @ me)
diff --git a/nwb_linkml/adapters/__init__.py b/nwb_linkml/adapters/__init__.py
index e69de29..7647640 100644
--- a/nwb_linkml/adapters/__init__.py
+++ b/nwb_linkml/adapters/__init__.py
@@ -0,0 +1,4 @@
+from nwb_linkml.adapters.adapter import Adapter
+from nwb_linkml.adapters.namespaces import NamespacesAdapter
+from nwb_linkml.adapters.schema import SchemaAdapter
+from nwb_linkml.adapters.classes import ClassAdapter
\ No newline at end of file
diff --git a/nwb_linkml/adapters/adapter.py b/nwb_linkml/adapters/adapter.py
index eedfa5e..475b32d 100644
--- a/nwb_linkml/adapters/adapter.py
+++ b/nwb_linkml/adapters/adapter.py
@@ -4,7 +4,7 @@ Base class for adapters
 from abc import abstractmethod
 import warnings
 from dataclasses import dataclass, field
-from typing import List, Dict, Type, Generator, Any, Tuple, Optional
+from typing import List, Dict, Type, Generator, Any, Tuple, Optional, TypeVar, TypeVarTuple, Unpack
 from pydantic import BaseModel, Field, validator
 from linkml_runtime.linkml_model import Element, SchemaDefinition, ClassDefinition, SlotDefinition, TypeDefinition
@@ -46,6 +46,8 @@ class BuildResult:
         self.types.extend(other.types)
         return self
+T = TypeVar('T')
+Ts = TypeVarTuple('Ts')
 class Adapter(BaseModel):
     @abstractmethod
@@ -84,7 +86,7 @@ class Adapter(BaseModel):
                     yield item[1]
-    def walk_types(self, input: BaseModel | list | dict, get_type: Type | List[Type] | Tuple[Type]):
+    def walk_types(self, input: BaseModel | list | dict, get_type: T | List[Unpack[Ts]] | Tuple[Unpack[Ts]]) -> Generator[T, None, None]:
         if not isinstance(get_type, (list, tuple)):
             get_type = [get_type]
diff --git a/nwb_linkml/adapters/arraylike.py b/nwb_linkml/adapters/arraylike.py
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/nwb_linkml/adapters/arraylike.py
@@ -0,0 +1,2 @@
+
+
diff --git a/nwb_linkml/adapters/classes.py b/nwb_linkml/adapters/classes.py
index 6ed4ff5..b68fcc9 100644
--- a/nwb_linkml/adapters/classes.py
+++ b/nwb_linkml/adapters/classes.py
@@ -3,10 +3,11 @@ Adapters to linkML classes
 """
 import pdb
 from typing import List, Optional
-from nwb_schema_language import Dataset, Group, ReferenceDtype, DTypeType
+from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
 from nwb_linkml.adapters.adapter import Adapter, BuildResult
 from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
-
+from nwb_linkml.maps import QUANTITY_MAP
+from nwb_linkml.lang_elements import Arraylike
 class ClassAdapter(Adapter):
     """
@@ -61,10 +62,122 @@ class ClassAdapter(Adapter):
         return name
-    def handle_dtype(self, dtype: DTypeType):
+    def handle_arraylike(self, dataset: Dataset, name:Optional[str]=None) -> Optional[ClassDefinition]:
+        """
+        Handling the
+
+        - dims
+        - shape
+        - dtype
+
+        fields as they are used in datasets. We'll use the :class:`.Arraylike` class to imitate them.
+
+        Specifically:
+
+        - Each slot within a subclass indicates a possible dimension.
+        - Only dimensions that are present in all the dimension specifiers in the
+          original schema are required.
+        - Shape requirements are indicated using max/min cardinalities on the slot.
+        - The arraylike object should be stored in the `array` slot on the containing class
+          (since there are already properties named `data`)
+
+        If any of `dims`, `shape`, or `dtype` are undefined, return `None`
+
+        Args:
+            dataset (:class:`nwb_schema_language.Dataset`): The dataset defining the arraylike
+            name (str): If present, override the name of the class before appending _Array
+                (we don't use _get_full_name here because we want to eventually decouple these functions from this adapter
+                class, which is sort of a development crutch. Ideally all these methods would just work on base nwb schema language types)
+        """
+        if not any((dataset.dims, dataset.shape)):
+            # none of the required properties are defined, that's fine.
+            return
+        elif not all((dataset.dims, dataset.shape)):
+            # need to have both if one is present!
+            raise ValueError(f"A dataset needs both dims and shape to define an arraylike object")
+
+        # The schema language doesn't have a way of specifying a dataset/group is "abstract"
+        # and yet hdmf-common says you don't need a dtype if the dataset is "abstract"
+        # so....
+        dtype = self.handle_dtype(dataset.dtype)
+
+        # dims and shape are lists of lists. First we couple them
+        # (so each dim has its corresponding shape),
+        # and then we take unique
+        # (dicts are ordered by default in recent pythons,
+        # while set() doesn't preserve order)
+        dims_shape = []
+        for inner_dim, inner_shape in zip(dataset.dims, dataset.shape):
+            if isinstance(inner_dim, list):
+                # list of lists
+                dims_shape.extend([(dim, shape) for dim, shape in zip(inner_dim, inner_shape)])
+            else:
+                # single-layer list
+                dims_shape.append((inner_dim, inner_shape))
+
+        dims_shape = tuple(dict.fromkeys(dims_shape).keys())
+
+        # now make slots for each of them
+        slots = []
+        for dims, shape in dims_shape:
+            # if a dim is present in all possible combinations of dims, make it required
+            if all([dims in inner_dim for inner_dim in dataset.dims]):
+                required = True
+            else:
+                required = False
+
+            # use cardinality to do shape
+            if shape == 'null':
+                cardinality = None
+            else:
+                cardinality = shape
+
+            slots.append(SlotDefinition(
+                name=dims,
+                required=required,
+                maximum_cardinality=cardinality,
+                minimum_cardinality=cardinality,
+                range=dtype
+            ))
+
+        # and then the class is just a subclass of `Arraylike` (which is imported by default from `nwb.language.yaml`)
+        if name:
+            pass
+        elif dataset.neurodata_type_def:
+            name = dataset.neurodata_type_def
+        elif dataset.name:
+            name = dataset.name
+        else:
+            raise ValueError(f"Dataset has no name or type definition, what do we call it?")
+
+        name = '_'.join([name, 'Array'])
+
+        array_class = ClassDefinition(
+            name=name,
+            is_a="Arraylike",
+            attributes=slots
+        )
+        return array_class
+
+
+    def handle_dtype(self, dtype: DTypeType | None) -> str:
         if isinstance(dtype, ReferenceDtype):
             return dtype.target_type
+        elif dtype is None or dtype == []:
+            # Some ill-defined datasets are "abstract" despite that not being in the schema language
+            return 'AnyType'
+        elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype):
+            # there is precisely one class that uses compound dtypes:
+            # TimeSeriesReferenceVectorData
+            # compoundDtypes are able to define a ragged table according to the schema
+            # but are used in this single case equivalently to attributes.
+            # so we'll... uh... treat them as slots.
+ # TODO + return 'AnyType' + #raise NotImplementedError('got distracted, need to implement') + else: + # flat dtype return dtype def build_attrs(self, cls: Dataset | Group) -> List[SlotDefinition]: @@ -72,7 +185,7 @@ class ClassAdapter(Adapter): SlotDefinition( name=attr.name, description=attr.doc, - range=self.handle_dtype(attr.dtype) + range=self.handle_dtype(attr.dtype), ) for attr in cls.attributes ] @@ -93,7 +206,8 @@ class ClassAdapter(Adapter): this_slot = SlotDefinition( name=subclass._get_name(), description=subclass.cls.doc, - range=subclass._get_full_name() + range=subclass._get_full_name(), + **QUANTITY_MAP[subclass.cls.quantity] ) nested_res.slots.append(this_slot) @@ -113,8 +227,6 @@ class ClassAdapter(Adapter): name = self._get_full_name() else: name = self._get_name() - # if name == 'TimeSeries': - # pdb.set_trace() # Get vanilla top-level attributes attrs = self.build_attrs(self.cls) @@ -122,16 +234,33 @@ class ClassAdapter(Adapter): # unnest and build subclasses in datasets and groups if isinstance(self.cls, Group): # only groups have sub-datasets and sub-groups + # split out the recursion step rather than making purely recursive because + # top-level datasets and groups are handled differently - they have names, + # and so we need to split out which things we unnest and which things + # can just be slots because they are already defined without knowing about + # the global state of the schema build. nested_res = self.build_subclasses(self.cls) attrs.extend(nested_res.slots) else: + # must be a dataset nested_res = BuildResult() + arraylike = self.handle_arraylike(self.cls, self._get_full_name()) + if arraylike: + # make a slot for the arraylike class + attrs.append( + SlotDefinition( + name='array', + range=arraylike.name + ) + ) + nested_res.classes.append(arraylike) + cls = ClassDefinition( name = name, is_a = self.cls.neurodata_type_inc, description=self.cls.doc, - attributes=attrs + attributes=attrs, ) res = BuildResult( classes = [cls, *nested_res.classes] diff --git a/nwb_linkml/lang_elements.py b/nwb_linkml/lang_elements.py index e7c440b..8ee539e 100644 --- a/nwb_linkml/lang_elements.py +++ b/nwb_linkml/lang_elements.py @@ -20,27 +20,27 @@ FlatDType = EnumDefinition( permissible_values=[PermissibleValue(p) for p in FlatDtype_source.__members__.keys()], ) -DimNameSlot = SlotDefinition( - name="dim_name", - range="string", - description="The name of a dimension" -) -DimShapeSlot = SlotDefinition( - name="dim_shape", - range="integer", - required=False -) -DimClass = ClassDefinition( - name="Dimension", - slots=[DimNameSlot.name, DimShapeSlot.name], - description="A single dimension within a shape" -) -DimSlot = SlotDefinition( - name="dim", - range=DimClass.name, - multivalued=True, - description="Slot representing the dimensions that a Shape can have" -) +# DimNameSlot = SlotDefinition( +# name="dim_name", +# range="string", +# description="The name of a dimension" +# ) +# DimShapeSlot = SlotDefinition( +# name="dim_shape", +# range="integer", +# required=False +# ) +# DimClass = ClassDefinition( +# name="Dimension", +# slots=[DimNameSlot.name, DimShapeSlot.name], +# description="A single dimension within a shape" +# ) +# DimSlot = SlotDefinition( +# name="dim", +# range=DimClass.name, +# multivalued=True, +# description="Slot representing the dimensions that a Shape can have" +# ) # ShapeClass = ClassDefinition( # name="Shape", @@ -61,14 +61,32 @@ for nwbtype, linkmltype in flat_to_linkml.items(): ) DTypeTypes.append(atype) +Arraylike = ClassDefinition( + 
name="Arraylike", + description= ("Container for arraylike information held in the dims, shape, and dtype properties." + "this is a special case to be interpreted by downstream i/o. this class has no slots" + "and is abstract by default." + "- Each slot within a subclass indicates a possible dimension." + "- Only dimensions that are present in all the dimension specifiers in the" + " original schema are required." + "- Shape requirements are indicated using max/min cardinalities on the slot." + ), + abstract=True +) + +AnyType = ClassDefinition( + name="AnyType", + class_uri="linkml:Any", + description="""Needed because some classes in hdmf-common are datasets without dtype""" +) NwbLangSchema = SchemaDefinition( name="nwb.language", id='nwb.language', description="Adapter objects to mimic the behavior of elements in the nwb-schema-language", enums=[FlatDType], - slots=[DimNameSlot, DimShapeSlot, DimSlot], - classes=[DimClass], + # slots=[DimNameSlot, DimShapeSlot, DimSlot], + classes=[Arraylike, AnyType], types=DTypeTypes, imports=['linkml:types'], prefixes={'linkml': Prefix('linkml','https://w3id.org/linkml')} diff --git a/nwb_linkml/maps/__init__.py b/nwb_linkml/maps/__init__.py index 6e2a50c..6078f74 100644 --- a/nwb_linkml/maps/__init__.py +++ b/nwb_linkml/maps/__init__.py @@ -1,2 +1,3 @@ # Import everything so it's defined, but shoudlnt' necessarily be used from here -from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC \ No newline at end of file +from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC +from nwb_linkml.maps.quantity import QUANTITY_MAP \ No newline at end of file diff --git a/nwb_linkml/maps/quantity.py b/nwb_linkml/maps/quantity.py new file mode 100644 index 0000000..9c0b769 --- /dev/null +++ b/nwb_linkml/maps/quantity.py @@ -0,0 +1,34 @@ +""" +Quantity maps on to two things: required and cardinality. + +Though it is technically possible to use an integer as +a quantity, that is never done in the core schema, +which is our only target for now. 
+ +We will handle cardinality of array dimensions elsewhere +""" + +QUANTITY_MAP = { + '*': { + 'required': False, + 'multivalued': True + }, + '+': { + 'required': True, + 'multivalued': True + }, + '?': { + 'required': False, + 'multivalued': False + }, + 1: { + 'required': True, + 'multivalued': False + }, + # include the NoneType for indexing + None: { + 'required': None, + 'multivalued': None + } +} + diff --git a/nwb_linkml/models/core.nwb.behavior.py b/nwb_linkml/models/core.nwb.behavior.py new file mode 100644 index 0000000..767d8c2 --- /dev/null +++ b/nwb_linkml/models/core.nwb.behavior.py @@ -0,0 +1,929 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class SpatialSeriesData(ConfiguredBaseModel): + """ + 1-D or 2-D array storing position or direction relative to some reference frame. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + array: Optional[SpatialSeriesDataArray] = Field(None) + + +class SpatialSeriesReferenceFrame(ConfiguredBaseModel): + """ + Description defining what exactly 'straight-ahead' means. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class SpatialSeriesDataArray(Arraylike): + + num_times: float = Field(...) + x: Optional[float] = Field(None) + xy: Optional[float] = Field(None) + xyz: Optional[float] = Field(None) + + +class AbstractFeatureSeriesData(ConfiguredBaseModel): + """ + Values of each feature at each time. + """ + unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""") + array: Optional[AbstractFeatureSeriesDataArray] = Field(None) + + +class AbstractFeatureSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_features: Optional[float] = Field(None) + + +class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel): + """ + Units of each feature. + """ + array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None) + + +class AbstractFeatureSeriesFeatureUnitsArray(Arraylike): + + num_features: str = Field(...) + + +class AbstractFeatureSeriesFeatures(ConfiguredBaseModel): + """ + Description of the features represented in TimeSeries::data. + """ + array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None) + + +class AbstractFeatureSeriesFeaturesArray(Arraylike): + + num_features: str = Field(...) + + +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[AnnotationSeriesDataArray] = Field(None) + + +class AnnotationSeriesDataArray(Arraylike): + + num_times: str = Field(...) + + +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[IntervalSeriesDataArray] = Field(None) + + +class IntervalSeriesDataArray(Arraylike): + + num_times: int = Field(...) + + +class DecompositionSeriesData(ConfiguredBaseModel): + """ + Data decomposed into frequency bands. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""") + array: Optional[DecompositionSeriesDataArray] = Field(None) + + +class DecompositionSeriesDataArray(Arraylike): + + num_times: Optional[float] = Field(None) + num_channels: Optional[float] = Field(None) + num_bands: Optional[float] = Field(None) + + +class DecompositionSeriesMetric(ConfiguredBaseModel): + """ + The metric used, e.g. phase, amplitude, power. + """ + None + + +class DecompositionSeriesBandsBandLimitsArray(Arraylike): + + num_bands: Optional[float] = Field(None) + low_high: Optional[float] = Field(None) + + +class DecompositionSeriesBandsBandMeanArray(Arraylike): + + num_bands: float = Field(...) + + +class DecompositionSeriesBandsBandStdevArray(Arraylike): + + num_bands: float = Field(...) + + +class UnitsObsIntervalsArray(Arraylike): + + num_intervals: Optional[float] = Field(None) + start|end: Optional[float] = Field(None) + + +class UnitsWaveformMeanArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) + num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformSdArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) 
+ num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformsArray(Arraylike): + + num_waveforms: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandName(VectorData): + """ + Name of the band, e.g. theta. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandLimits(VectorData): + """ + Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center. + """ + array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandMean(VectorData): + """ + The mean Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandStdev(VectorData): + """ + The standard deviation of Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimes(VectorData): + """ + Spike times for each unit in seconds. + """ + resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""") + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsObsIntervals(VectorData): + """ + Observation intervals for each unit. 
+ """ + array: Optional[UnitsObsIntervalsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodeGroup(VectorData): + """ + Electrode group that each spike unit came from. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformSdArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. 
This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimesIndex(VectorIndex): + """ + Index into the spike_times dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsObsIntervalsIndex(VectorIndex): + """ + Index into the obs_intervals dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodesIndex(VectorIndex): + """ + Index into electrodes. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndex(VectorIndex): + """ + Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndexIndex(VectorIndex): + """ + Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. 
They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DecompositionSeriesSourceChannels(DynamicTableRegion): + """ + DynamicTableRegion pointer to the channels that this decomposition series was generated from. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class UnitsElectrodes(DynamicTableRegion): + """ + Electrode that each spike unit came from, specified using a DynamicTableRegion. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class DecompositionSeriesBands(DynamicTable): + """ + Table for describing the bands that this series was generated from. There should be one row in this table for each band. + """ + band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""") + band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""") + band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""") + band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class Units(DynamicTable): + """ + Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. + """ + spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""") + spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""") + obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""") + obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""") + electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""") + electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""") + electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""") + waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""") + waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""") + waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""") + waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. 
The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class BehavioralEpochs(NWBDataInterface): + """ + TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. 
These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data. + """ + IntervalSeries: Optional[List[IntervalSeries]] = Field(default_factory=list, description="""IntervalSeries object containing start and stop times of epochs.""") + + +class BehavioralEvents(NWBDataInterface): + """ + TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details. + """ + TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing behavioral events.""") + + +class BehavioralTimeSeries(NWBDataInterface): + """ + TimeSeries for storing Behavoioral time series data. See description of BehavioralEpochs for more details. + """ + TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing continuous behavioral data.""") + + +class PupilTracking(NWBDataInterface): + """ + Eye-tracking data, representing pupil size. + """ + TimeSeries: List[TimeSeries] = Field(default_factory=list, description="""TimeSeries object containing time series data on pupil size.""") + + +class EyeTracking(NWBDataInterface): + """ + Eye-tracking data, representing direction of gaze. + """ + SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing data measuring direction of gaze.""") + + +class CompassDirection(NWBDataInterface): + """ + With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees. + """ + SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing direction of gaze travel.""") + + +class Position(NWBDataInterface): + """ + Position data, whether along the x, x/y or x/y/z axis. + """ + SpatialSeries: List[SpatialSeries] = Field(default_factory=list, description="""SpatialSeries object containing position data.""") + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class SpatialSeries(TimeSeries): + """ + Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values. + """ + data: SpatialSeriesData = Field(..., description="""1-D or 2-D array storing position or direction relative to some reference frame.""") + reference_frame: Optional[SpatialSeriesReferenceFrame] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AbstractFeatureSeries(TimeSeries): + """ + Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. + """ + data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""") + feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""") + features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AnnotationSeries(TimeSeries): + """ + Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. + """ + data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IntervalSeries(TimeSeries): + """ + Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. 
+ """ + data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class DecompositionSeries(TimeSeries): + """ + Spectral analysis of a time series, e.g. of an LFP or a speech signal. + """ + data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""") + metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""") + bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. 
+ """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +SpatialSeriesData.update_forward_refs() +SpatialSeriesReferenceFrame.update_forward_refs() +Arraylike.update_forward_refs() +SpatialSeriesDataArray.update_forward_refs() +AbstractFeatureSeriesData.update_forward_refs() +AbstractFeatureSeriesDataArray.update_forward_refs() +AbstractFeatureSeriesFeatureUnits.update_forward_refs() +AbstractFeatureSeriesFeatureUnitsArray.update_forward_refs() +AbstractFeatureSeriesFeatures.update_forward_refs() +AbstractFeatureSeriesFeaturesArray.update_forward_refs() +AnnotationSeriesData.update_forward_refs() +AnnotationSeriesDataArray.update_forward_refs() +IntervalSeriesData.update_forward_refs() +IntervalSeriesDataArray.update_forward_refs() +DecompositionSeriesData.update_forward_refs() +DecompositionSeriesDataArray.update_forward_refs() +DecompositionSeriesMetric.update_forward_refs() +DecompositionSeriesBandsBandLimitsArray.update_forward_refs() +DecompositionSeriesBandsBandMeanArray.update_forward_refs() +DecompositionSeriesBandsBandStdevArray.update_forward_refs() +UnitsObsIntervalsArray.update_forward_refs() +UnitsWaveformMeanArray.update_forward_refs() +UnitsWaveformSdArray.update_forward_refs() +UnitsWaveformsArray.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +DecompositionSeriesBandsBandName.update_forward_refs() +DecompositionSeriesBandsBandLimits.update_forward_refs() +DecompositionSeriesBandsBandMean.update_forward_refs() +DecompositionSeriesBandsBandStdev.update_forward_refs() +UnitsSpikeTimes.update_forward_refs() +UnitsObsIntervals.update_forward_refs() +UnitsElectrodeGroup.update_forward_refs() +UnitsWaveformMean.update_forward_refs() +UnitsWaveformSd.update_forward_refs() +UnitsWaveforms.update_forward_refs() 
+VectorIndex.update_forward_refs() +UnitsSpikeTimesIndex.update_forward_refs() +UnitsObsIntervalsIndex.update_forward_refs() +UnitsElectrodesIndex.update_forward_refs() +UnitsWaveformsIndex.update_forward_refs() +UnitsWaveformsIndexIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DecompositionSeriesSourceChannels.update_forward_refs() +UnitsElectrodes.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +DecompositionSeriesBands.update_forward_refs() +Units.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +NWBData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +Image.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferences.update_forward_refs() +ImageReferencesArray.update_forward_refs() +NWBContainer.update_forward_refs() +NWBDataInterface.update_forward_refs() +BehavioralEpochs.update_forward_refs() +BehavioralEvents.update_forward_refs() +BehavioralTimeSeries.update_forward_refs() +PupilTracking.update_forward_refs() +EyeTracking.update_forward_refs() +CompassDirection.update_forward_refs() +Position.update_forward_refs() +TimeSeries.update_forward_refs() +SpatialSeries.update_forward_refs() +AbstractFeatureSeries.update_forward_refs() +AnnotationSeries.update_forward_refs() +IntervalSeries.update_forward_refs() +DecompositionSeries.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.device.py b/nwb_linkml/models/core.nwb.device.py new file mode 100644 index 0000000..962201d --- /dev/null +++ b/nwb_linkml/models/core.nwb.device.py @@ -0,0 +1,420 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class Arraylike(ConfiguredBaseModel): + """ + 
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
+    - Each slot within a subclass indicates a possible dimension.
+    - Only dimensions that are present in all the dimension specifiers in the original schema are required.
+    - Shape requirements are indicated using max/min cardinalities on the slot.
+    """
+    None
+
+
+class ImageArray(Arraylike):
+    
+    x: float = Field(...)
+    y: float = Field(...)
+    r_g_b: Optional[float] = Field(None)
+    r_g_b_a: Optional[float] = Field(None)
+    
+
+class ImageReferencesArray(Arraylike):
+    
+    num_images: Image = Field(...)
+    
+
+class TimeSeriesData(ConfiguredBaseModel):
+    """
+    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
+    """
+    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
+    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
+    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
+    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
+    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
+    array: Optional[TimeSeriesDataArray] = Field(None)
+
+
+class TimeSeriesDataArray(Arraylike):
+    
+    num_times: Any = Field(...)
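+    # Per the Arraylike convention above, each attribute here stands for one
+    # possible dimension of TimeSeries.data: num_times is required because the
+    # time dimension appears in every dims specifier in the source NWB schema,
+    # while num_DIM2..num_DIM4 are optional (data may be 1-D through 4-D).
+    # Where the schema fixes a shape, it would surface as min/max cardinality
+    # constraints on these slots.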
+ num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). 
+ """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
+ """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class Device(NWBContainer): + """ + Metadata about a data acquisition device, e.g., recording system, electrode, microscope. + """ + description: Optional[str] = Field(None, description="""Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.""") + manufacturer: Optional[str] = Field(None, description="""The name of the manufacturer of the device.""") + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. 
This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. 
+ """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +Arraylike.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferencesArray.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +NWBData.update_forward_refs() +Image.update_forward_refs() +ImageReferences.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +VectorData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +VectorIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +NWBContainer.update_forward_refs() +Device.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +DynamicTable.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.epoch.py b/nwb_linkml/models/core.nwb.epoch.py new file mode 100644 index 0000000..0615738 --- /dev/null +++ b/nwb_linkml/models/core.nwb.epoch.py @@ -0,0 +1,484 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. 
this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. 
+ """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsStartTime(VectorData): + """ + Start time of epoch, in seconds. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsStopTime(VectorData): + """ + Stop time of epoch, in seconds. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsTags(VectorData): + """ + User-defined tags that identify or categorize events. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData): + """ + An index into a TimeSeries object. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class TimeIntervalsTagsIndex(VectorIndex): + """ + Index for tags. 
+ """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class TimeIntervalsTimeseriesIndex(VectorIndex): + """ + Index for timeseries. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. 
This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class TimeIntervals(DynamicTable): + """ + A container for aggregating epoch data and the TimeSeries that each epoch applies to. + """ + start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""") + stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""") + tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""") + tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""") + timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""") + timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. 
This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +Arraylike.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferencesArray.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +NWBData.update_forward_refs() +Image.update_forward_refs() +ImageReferences.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +VectorData.update_forward_refs() +TimeIntervalsStartTime.update_forward_refs() +TimeIntervalsStopTime.update_forward_refs() +TimeIntervalsTags.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +TimeIntervalsTimeseries.update_forward_refs() +VectorIndex.update_forward_refs() +TimeIntervalsTagsIndex.update_forward_refs() +TimeIntervalsTimeseriesIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +NWBContainer.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +DynamicTable.update_forward_refs() +TimeIntervals.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git 
a/nwb_linkml/models/core.nwb.file.py b/nwb_linkml/models/core.nwb.file.py new file mode 100644 index 0000000..ce352ef --- /dev/null +++ b/nwb_linkml/models/core.nwb.file.py @@ -0,0 +1,3431 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class NWBFileFileCreateDate(ConfiguredBaseModel): + """ + A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array. + """ + array: Optional[NWBFileFileCreateDateArray] = Field(None) + + +class NWBFileIdentifier(ConfiguredBaseModel): + """ + A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files. + """ + None + + +class NWBFileSessionDescription(ConfiguredBaseModel): + """ + A description of the experimental session and data in the file. + """ + None + + +class NWBFileSessionStartTime(ConfiguredBaseModel): + """ + Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. + """ + None + + +class NWBFileTimestampsReferenceTime(ConfiguredBaseModel): + """ + Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero). + """ + None + + +class NWBFileAcquisition(ConfiguredBaseModel): + """ + Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. 
This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis. + """ + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Acquired, raw data.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tabular data that is relevant to acquisition""") + + +class NWBFileAnalysis(ConfiguredBaseModel): + """ + Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs. + """ + NWBContainer: Optional[List[NWBContainer]] = Field(default_factory=list, description="""Custom analysis results.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tabular data that is relevant to data stored in analysis""") + + +class NWBFileScratch(ConfiguredBaseModel): + """ + A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard. + """ + ScratchData: Optional[List[ScratchData]] = Field(default_factory=list, description="""Any one-off datasets""") + NWBContainer: Optional[List[NWBContainer]] = Field(default_factory=list, description="""Any one-off containers""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Any one-off tables""") + + +class NWBFileProcessing(ConfiguredBaseModel): + """ + The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis. + """ + ProcessingModule: Optional[List[ProcessingModule]] = Field(default_factory=list, description="""Intermediate analysis of acquired data.""") + + +class NWBFileStimulus(ConfiguredBaseModel): + """ + Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. 
Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file. + """ + presentation: NWBFileStimulusPresentation = Field(..., description="""Stimuli presented during the experiment.""") + templates: NWBFileStimulusTemplates = Field(..., description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""") + + +class NWBFileStimulusPresentation(ConfiguredBaseModel): + """ + Stimuli presented during the experiment. + """ + TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries objects containing data of presented stimuli.""") + + +class NWBFileStimulusTemplates(ConfiguredBaseModel): + """ + Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame. + """ + TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries objects containing template data of presented stimuli.""") + Images: Optional[List[Images]] = Field(default_factory=list, description="""Images objects containing images of presented stimuli.""") + + +class NWBFileGeneral(ConfiguredBaseModel): + """ + Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them. + """ + data_collection: Optional[NWBFileGeneralDataCollection] = Field(None, description="""Notes about data collection and analysis.""") + experiment_description: Optional[NWBFileGeneralExperimentDescription] = Field(None, description="""General description of the experiment.""") + experimenter: Optional[NWBFileGeneralExperimenter] = Field(None, description="""Name of person(s) who performed the experiment. 
Can also specify roles of different people involved.""") + institution: Optional[NWBFileGeneralInstitution] = Field(None, description="""Institution(s) where experiment was performed.""") + keywords: Optional[NWBFileGeneralKeywords] = Field(None, description="""Terms to search over.""") + lab: Optional[NWBFileGeneralLab] = Field(None, description="""Laboratory where experiment was performed.""") + notes: Optional[NWBFileGeneralNotes] = Field(None, description="""Notes about the experiment.""") + pharmacology: Optional[NWBFileGeneralPharmacology] = Field(None, description="""Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.""") + protocol: Optional[NWBFileGeneralProtocol] = Field(None, description="""Experimental protocol, if applicable. e.g., include IACUC protocol number.""") + related_publications: Optional[NWBFileGeneralRelatedPublications] = Field(None, description="""Publication information. PMID, DOI, URL, etc.""") + session_id: Optional[NWBFileGeneralSessionId] = Field(None, description="""Lab-specific ID for the session.""") + slices: Optional[NWBFileGeneralSlices] = Field(None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""") + source_script: Optional[NWBFileGeneralSourceScript] = Field(None, description="""Script file or link to public source code used to create this NWB file.""") + stimulus: Optional[NWBFileGeneralStimulus] = Field(None, description="""Notes about stimuli, such as how and where they were presented.""") + surgery: Optional[NWBFileGeneralSurgery] = Field(None, description="""Narrative description about surgery/surgeries, including date(s) and who performed surgery.""") + virus: Optional[NWBFileGeneralVirus] = Field(None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""") + LabMetaData: Optional[List[LabMetaData]] = Field(default_factory=list, description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""") + devices: Optional[NWBFileGeneralDevices] = Field(None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""") + subject: Optional[NWBFileGeneralSubject] = Field(None, description="""Information about the animal or person from which the data was measured.""") + extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(None, description="""Metadata related to extracellular electrophysiology.""") + intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(None, description="""Metadata related to intracellular electrophysiology.""") + optogenetics: Optional[NWBFileGeneralOptogenetics] = Field(None, description="""Metadata describing optogenetic stimuluation.""") + optophysiology: Optional[NWBFileGeneralOptophysiology] = Field(None, description="""Metadata related to optophysiology.""") + + +class NWBFileGeneralDataCollection(ConfiguredBaseModel): + """ + Notes about data collection and analysis. + """ + None + + +class NWBFileGeneralExperimentDescription(ConfiguredBaseModel): + """ + General description of the experiment. + """ + None + + +class NWBFileGeneralExperimenter(ConfiguredBaseModel): + """ + Name of person(s) who performed the experiment. Can also specify roles of different people involved. 
+ """ + array: Optional[NWBFileGeneralExperimenterArray] = Field(None) + + +class NWBFileGeneralInstitution(ConfiguredBaseModel): + """ + Institution(s) where experiment was performed. + """ + None + + +class NWBFileGeneralKeywords(ConfiguredBaseModel): + """ + Terms to search over. + """ + array: Optional[NWBFileGeneralKeywordsArray] = Field(None) + + +class NWBFileGeneralLab(ConfiguredBaseModel): + """ + Laboratory where experiment was performed. + """ + None + + +class NWBFileGeneralNotes(ConfiguredBaseModel): + """ + Notes about the experiment. + """ + None + + +class NWBFileGeneralPharmacology(ConfiguredBaseModel): + """ + Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + """ + None + + +class NWBFileGeneralProtocol(ConfiguredBaseModel): + """ + Experimental protocol, if applicable. e.g., include IACUC protocol number. + """ + None + + +class NWBFileGeneralRelatedPublications(ConfiguredBaseModel): + """ + Publication information. PMID, DOI, URL, etc. + """ + array: Optional[NWBFileGeneralRelatedPublicationsArray] = Field(None) + + +class NWBFileGeneralSessionId(ConfiguredBaseModel): + """ + Lab-specific ID for the session. + """ + None + + +class NWBFileGeneralSlices(ConfiguredBaseModel): + """ + Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. + """ + None + + +class NWBFileGeneralSourceScript(ConfiguredBaseModel): + """ + Script file or link to public source code used to create this NWB file. + """ + file_name: Optional[str] = Field(None, description="""Name of script file.""") + + +class NWBFileGeneralStimulus(ConfiguredBaseModel): + """ + Notes about stimuli, such as how and where they were presented. + """ + None + + +class NWBFileGeneralSurgery(ConfiguredBaseModel): + """ + Narrative description about surgery/surgeries, including date(s) and who performed surgery. + """ + None + + +class NWBFileGeneralVirus(ConfiguredBaseModel): + """ + Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. + """ + None + + +class NWBFileGeneralDevices(ConfiguredBaseModel): + """ + Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc. + """ + Device: Optional[List[Device]] = Field(default_factory=list, description="""Data acquisition devices.""") + + +class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): + """ + Metadata related to extracellular electrophysiology. + """ + ElectrodeGroup: Optional[List[ElectrodeGroup]] = Field(default_factory=list, description="""Physical group of electrodes.""") + electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(None, description="""A table of all electrodes (i.e. channels) used for recording.""") + + +class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): + """ + Metadata related to intracellular electrophysiology. + """ + filtering: Optional[NWBFileGeneralIntracellularEphysFiltering] = Field(None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. 
If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""") + IntracellularElectrode: Optional[List[IntracellularElectrode]] = Field(default_factory=list, description="""An intracellular electrode.""") + sweep_table: Optional[NWBFileGeneralIntracellularEphysSweepTable] = Field(None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""") + intracellular_recordings: Optional[NWBFileGeneralIntracellularEphysIntracellularRecordings] = Field(None, description="""A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.""") + simultaneous_recordings: Optional[NWBFileGeneralIntracellularEphysSimultaneousRecordings] = Field(None, description="""A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes""") + sequential_recordings: Optional[NWBFileGeneralIntracellularEphysSequentialRecordings] = Field(None, description="""A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence.""") + repetitions: Optional[NWBFileGeneralIntracellularEphysRepetitions] = Field(None, description="""A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.""") + experimental_conditions: Optional[NWBFileGeneralIntracellularEphysExperimentalConditions] = Field(None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""") + + +class NWBFileGeneralIntracellularEphysFiltering(ConfiguredBaseModel): + """ + [DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries. + """ + None + + +class NWBFileGeneralOptogenetics(ConfiguredBaseModel): + """ + Metadata describing optogenetic stimuluation. 
+ """ + OptogeneticStimulusSite: Optional[List[OptogeneticStimulusSite]] = Field(default_factory=list, description="""An optogenetic stimulation site.""") + + +class NWBFileGeneralOptophysiology(ConfiguredBaseModel): + """ + Metadata related to optophysiology. + """ + ImagingPlane: Optional[List[ImagingPlane]] = Field(default_factory=list, description="""An imaging plane.""") + + +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + epochs: Optional[NWBFileIntervalsEpochs] = Field(None, description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""") + trials: Optional[NWBFileIntervalsTrials] = Field(None, description="""Repeated experimental events that have a logical grouping.""") + invalid_times: Optional[NWBFileIntervalsInvalidTimes] = Field(None, description="""Time intervals that should be removed from analysis.""") + TimeIntervals: Optional[List[TimeIntervals]] = Field(default_factory=list, description="""Optional additional table(s) for describing other experimental time intervals.""") + + +class SubjectAge(ConfiguredBaseModel): + """ + Age of subject. Can be supplied instead of 'date_of_birth'. + """ + reference: Optional[str] = Field(None, description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""") + + +class SubjectDateOfBirth(ConfiguredBaseModel): + """ + Date of birth of subject. Can be supplied instead of 'age'. + """ + None + + +class SubjectDescription(ConfiguredBaseModel): + """ + Description of subject and where subject came from (e.g., breeder, if animal). + """ + None + + +class SubjectGenotype(ConfiguredBaseModel): + """ + Genetic strain. If absent, assume Wild Type (WT). + """ + None + + +class SubjectSex(ConfiguredBaseModel): + """ + Gender of subject. + """ + None + + +class SubjectSpecies(ConfiguredBaseModel): + """ + Species of subject. + """ + None + + +class SubjectStrain(ConfiguredBaseModel): + """ + Strain of subject. + """ + None + + +class SubjectSubjectId(ConfiguredBaseModel): + """ + ID of animal/person used/participating in experiment (lab convention). + """ + None + + +class SubjectWeight(ConfiguredBaseModel): + """ + Weight at time of experiment, at time of surgery and at other important times. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o; this class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class NWBFileFileCreateDateArray(Arraylike): + + num_modifications: date = Field(...) + + +class NWBFileGeneralExperimenterArray(Arraylike): + + num_experimenters: str = Field(...) + + +class NWBFileGeneralKeywordsArray(Arraylike): + + num_keywords: str = Field(...) + + +class NWBFileGeneralRelatedPublicationsArray(Arraylike): + + num_publications: str = Field(...) + + +class AbstractFeatureSeriesData(ConfiguredBaseModel): + """ + Values of each feature at each time. 
+ """ + unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""") + array: Optional[AbstractFeatureSeriesDataArray] = Field(None) + + +class AbstractFeatureSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_features: Optional[float] = Field(None) + + +class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel): + """ + Units of each feature. + """ + array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None) + + +class AbstractFeatureSeriesFeatureUnitsArray(Arraylike): + + num_features: str = Field(...) + + +class AbstractFeatureSeriesFeatures(ConfiguredBaseModel): + """ + Description of the features represented in TimeSeries::data. + """ + array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None) + + +class AbstractFeatureSeriesFeaturesArray(Arraylike): + + num_features: str = Field(...) + + +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[AnnotationSeriesDataArray] = Field(None) + + +class AnnotationSeriesDataArray(Arraylike): + + num_times: str = Field(...) + + +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[IntervalSeriesDataArray] = Field(None) + + +class IntervalSeriesDataArray(Arraylike): + + num_times: int = Field(...) + + +class DecompositionSeriesData(ConfiguredBaseModel): + """ + Data decomposed into frequency bands. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""") + array: Optional[DecompositionSeriesDataArray] = Field(None) + + +class DecompositionSeriesDataArray(Arraylike): + + num_times: Optional[float] = Field(None) + num_channels: Optional[float] = Field(None) + num_bands: Optional[float] = Field(None) + + +class DecompositionSeriesMetric(ConfiguredBaseModel): + """ + The metric used, e.g. phase, amplitude, power. + """ + None + + +class DecompositionSeriesBandsBandLimitsArray(Arraylike): + + num_bands: Optional[float] = Field(None) + low_high: Optional[float] = Field(None) + + +class DecompositionSeriesBandsBandMeanArray(Arraylike): + + num_bands: float = Field(...) + + +class DecompositionSeriesBandsBandStdevArray(Arraylike): + + num_bands: float = Field(...) + + +class UnitsObsIntervalsArray(Arraylike): + + num_intervals: Optional[float] = Field(None) + start_end: Optional[float] = Field(None)  # dim is named 'start|end' in the NWB schema; renamed here to a valid Python identifier + + +class UnitsWaveformMeanArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) 
+ num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformSdArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) + num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformsArray(Arraylike): + + num_waveforms: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesX(VectorData): + """ + x coordinate of the channel location in the brain (+x is posterior). + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesY(VectorData): + """ + y coordinate of the channel location in the brain (+y is inferior). + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesZ(VectorData): + """ + z coordinate of the channel location in the brain (+z is right). + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesImp(VectorData): + """ + Impedance of the channel, in ohms. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesLocation(VectorData): + """ + Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesFiltering(VectorData): + """ + Description of hardware filtering, including the filter name and frequency cutoffs. 
+ """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesGroup(VectorData): + """ + Reference to the ElectrodeGroup this electrode is a part of. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesGroupName(VectorData): + """ + Name of the ElectrodeGroup this electrode is a part of. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesRelX(VectorData): + """ + x coordinate in electrode group + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesRelY(VectorData): + """ + y coordinate in electrode group + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesRelZ(VectorData): + """ + z coordinate in electrode group + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class NWBFileGeneralExtracellularEphysElectrodesReference(VectorData): + """ + Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\". + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandName(VectorData): + """ + Name of the band, e.g. theta. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandLimits(VectorData): + """ + Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center. + """ + array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandMean(VectorData): + """ + The mean Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandStdev(VectorData): + """ + The standard deviation of Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimes(VectorData): + """ + Spike times for each unit in seconds. + """ + resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. 
Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""") + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsObsIntervals(VectorData): + """ + Observation intervals for each unit. + """ + array: Optional[UnitsObsIntervalsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodeGroup(VectorData): + """ + Electrode group that each spike unit came from. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformSdArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimesIndex(VectorIndex): + """ + Index into the spike_times dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsObsIntervalsIndex(VectorIndex): + """ + Index into the obs_intervals dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodesIndex(VectorIndex): + """ + Index into electrodes. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndex(VectorIndex): + """ + Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndexIndex(VectorIndex): + """ + Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. 
+ """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DecompositionSeriesSourceChannels(DynamicTableRegion): + """ + DynamicTableRegion pointer to the channels that this decomposition series was generated from. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class UnitsElectrodes(DynamicTableRegion): + """ + Electrode that each spike unit came from, specified using a DynamicTableRegion. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. 
DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): + """ + A table of all electrodes (i.e. channels) used for recording. + """ + x: Optional[NWBFileGeneralExtracellularEphysElectrodesX] = Field(None, description="""x coordinate of the channel location in the brain (+x is posterior).""") + y: Optional[NWBFileGeneralExtracellularEphysElectrodesY] = Field(None, description="""y coordinate of the channel location in the brain (+y is inferior).""") + z: Optional[NWBFileGeneralExtracellularEphysElectrodesZ] = Field(None, description="""z coordinate of the channel location in the brain (+z is right).""") + imp: Optional[NWBFileGeneralExtracellularEphysElectrodesImp] = Field(None, description="""Impedance of the channel, in ohms.""") + location: NWBFileGeneralExtracellularEphysElectrodesLocation = Field(..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + filtering: Optional[NWBFileGeneralExtracellularEphysElectrodesFiltering] = Field(None, description="""Description of hardware filtering, including the filter name and frequency cutoffs.""") + group: NWBFileGeneralExtracellularEphysElectrodesGroup = Field(..., description="""Reference to the ElectrodeGroup this electrode is a part of.""") + group_name: NWBFileGeneralExtracellularEphysElectrodesGroupName = Field(..., description="""Name of the ElectrodeGroup this electrode is a part of.""") + rel_x: Optional[NWBFileGeneralExtracellularEphysElectrodesRelX] = Field(None, description="""x coordinate in electrode group""") + rel_y: Optional[NWBFileGeneralExtracellularEphysElectrodesRelY] = Field(None, description="""y coordinate in electrode group""") + rel_z: Optional[NWBFileGeneralExtracellularEphysElectrodesRelZ] = Field(None, description="""z coordinate in electrode group""") + reference: Optional[NWBFileGeneralExtracellularEphysElectrodesReference] = Field(None, description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class DecompositionSeriesBands(DynamicTable): + """ + Table for describing the bands that this series was generated from. There should be one row in this table for each band. + """ + band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""") + band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""") + band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""") + band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class Units(DynamicTable): + """ + Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. + """ + spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""") + spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""") + obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""") + obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""") + electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""") + electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""") + electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""") + waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""") + waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""") + waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""") + waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileUnits(Units): + """ + Data about sorted spike units. 
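The doubly ragged indexing that the 'waveforms' docstring above walks through can be traced with plain Python lists. This is only an illustrative sketch reusing the example values from that docstring; the `spans` helper and the stand-in lists are hypothetical and are not part of the generated models, which represent these columns as VectorData/VectorIndex classes.

    # End offsets into waveforms_index, one per unit (example values from the docstring)
    waveforms_index_index = [2, 5, 6]
    # End offsets into the waveforms table, one per spike event
    waveforms_index = [3, 6, 8, 10, 12, 13]
    waveforms = list(range(13))  # stand-in for 13 waveform rows

    def spans(offsets):
        # Turn a list of end offsets into (start, stop) pairs
        return list(zip([0] + offsets[:-1], offsets))

    # spike events belonging to each unit
    unit_events = [waveforms_index[start:stop] for start, stop in spans(waveforms_index_index)]
    # waveform rows belonging to the first spike event of the first unit
    first_event_rows = waveforms[slice(*spans(waveforms_index)[0])]

    print(unit_events)       # [[3, 6], [8, 10, 12], [13]] -> 2, 3, and 1 spike events per unit
    print(first_event_rows)  # [0, 1, 2] -> 3 waveforms recorded for the first spike of the first unit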
+ """ + spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""") + spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""") + obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""") + obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""") + electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""") + electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""") + electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""") + waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""") + waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""") + waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""") + waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class ScratchData(NWBData): + """ + Any one-off datasets + """ + notes: Optional[str] = Field(None, description="""Any notes the user has about the dataset being stored""") + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. 
Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBFile(NWBContainer): + """ + An NWB file storing cellular-based neurophysiology data from a single experimental session. + """ + nwb_version: Optional[str] = Field(None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""") + file_create_date: NWBFileFileCreateDate = Field(..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""") + identifier: NWBFileIdentifier = Field(..., description="""A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files.""") + session_description: NWBFileSessionDescription = Field(..., description="""A description of the experimental session and data in the file.""") + session_start_time: NWBFileSessionStartTime = Field(..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""") + timestamps_reference_time: NWBFileTimestampsReferenceTime = Field(..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""") + acquisition: NWBFileAcquisition = Field(..., description="""Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. 
Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis.""") + analysis: NWBFileAnalysis = Field(..., description="""Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs.""") + scratch: Optional[NWBFileScratch] = Field(None, description="""A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard.""") + processing: NWBFileProcessing = Field(..., description="""The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis.""") + stimulus: NWBFileStimulus = Field(..., description="""Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file.""") + general: NWBFileGeneral = Field(..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). 
The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""") + intervals: Optional[NWBFileIntervals] = Field(None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""") + units: Optional[NWBFileUnits] = Field(None, description="""Data about sorted spike units.""") + + +class LabMetaData(NWBContainer): + """ + Lab-specific meta-data. + """ + None + + +class Subject(NWBContainer): + """ + Information about the animal or person from which the data was measured. + """ + age: Optional[SubjectAge] = Field(None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""") + date_of_birth: Optional[SubjectDateOfBirth] = Field(None, description="""Date of birth of subject. Can be supplied instead of 'age'.""") + description: Optional[SubjectDescription] = Field(None, description="""Description of subject and where subject came from (e.g., breeder, if animal).""") + genotype: Optional[SubjectGenotype] = Field(None, description="""Genetic strain. If absent, assume Wild Type (WT).""") + sex: Optional[SubjectSex] = Field(None, description="""Gender of subject.""") + species: Optional[SubjectSpecies] = Field(None, description="""Species of subject.""") + strain: Optional[SubjectStrain] = Field(None, description="""Strain of subject.""") + subject_id: Optional[SubjectSubjectId] = Field(None, description="""ID of animal/person used/participating in experiment (lab convention).""") + weight: Optional[SubjectWeight] = Field(None, description="""Weight at time of experiment, at time of surgery and at other important times.""") + + +class NWBFileGeneralSubject(Subject): + """ + Information about the animal or person from which the data was measured. + """ + age: Optional[SubjectAge] = Field(None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""") + date_of_birth: Optional[SubjectDateOfBirth] = Field(None, description="""Date of birth of subject. Can be supplied instead of 'age'.""") + description: Optional[SubjectDescription] = Field(None, description="""Description of subject and where subject came from (e.g., breeder, if animal).""") + genotype: Optional[SubjectGenotype] = Field(None, description="""Genetic strain. 
If absent, assume Wild Type (WT).""") + sex: Optional[SubjectSex] = Field(None, description="""Gender of subject.""") + species: Optional[SubjectSpecies] = Field(None, description="""Species of subject.""") + strain: Optional[SubjectStrain] = Field(None, description="""Strain of subject.""") + subject_id: Optional[SubjectSubjectId] = Field(None, description="""ID of animal/person used/participating in experiment (lab convention).""") + weight: Optional[SubjectWeight] = Field(None, description="""Weight at time of experiment, at time of surgery and at other important times.""") + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AbstractFeatureSeries(TimeSeries): + """ + Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. 
A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. + """ + data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""") + feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""") + features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AnnotationSeries(TimeSeries): + """ + Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. + """ + data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IntervalSeries(TimeSeries): + """ + Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. + """ + data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class DecompositionSeries(TimeSeries): + """ + Spectral analysis of a time series, e.g. of an LFP or a speech signal. + """ + data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""") + metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""") + bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. 
+ """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
+ """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class TimeIntervals(DynamicTable): + """ + A container for aggregating epoch data and the TimeSeries that each epoch applies to. 
+ """ + start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""") + stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""") + tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""") + tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""") + timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""") + timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileIntervalsEpochs(TimeIntervals): + """ + Divisions in time marking experimental stages or sub-divisions of a single recording session. + """ + start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""") + stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""") + tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""") + tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""") + timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""") + timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileIntervalsTrials(TimeIntervals): + """ + Repeated experimental events that have a logical grouping. + """ + start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""") + stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""") + tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""") + tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""") + timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""") + timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileIntervalsInvalidTimes(TimeIntervals): + """ + Time intervals that should be removed from analysis. + """ + start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""") + stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""") + tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""") + tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""") + timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""") + timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class TimeIntervalsStartTime(VectorData): + """ + Start time of epoch, in seconds. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsStopTime(VectorData): + """ + Stop time of epoch, in seconds. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsTags(VectorData): + """ + User-defined tags that identify or categorize events. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsTagsIndex(VectorIndex): + """ + Index for tags. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData): + """ + An index into a TimeSeries object. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeIntervalsTimeseriesIndex(VectorIndex): + """ + Index for timeseries. 
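The timeseries column above is a TimeSeriesReferenceVectorData, which per its docstring stores a start_index and count indicating a range in time, plus a reference to the TimeSeries itself. A hypothetical sketch of how one such row selects that range, using a plain list in place of the referenced dataset:

    timeseries_data = list(range(100))   # stand-in for the referenced TimeSeries.data
    start_index, count = 10, 5           # one row of the TimeSeriesReferenceVectorData column
    selected = timeseries_data[start_index:start_index + count]
    print(selected)   # [10, 11, 12, 13, 14] -> the slice of the TimeSeries this row points at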
+ """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class TwoPhotonSeriesFieldOfView(ConfiguredBaseModel): + """ + Width, height and depth of image, or imaged area, in meters. + """ + array: Optional[TwoPhotonSeriesFieldOfViewArray] = Field(None) + + +class TwoPhotonSeriesFieldOfViewArray(Arraylike): + + width|height: Optional[float] = Field(None) + width|height|depth: Optional[float] = Field(None) + + +class RoiResponseSeries(TimeSeries): + """ + ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. + """ + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") + rois: RoiResponseSeriesRois = Field(..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + array: Optional[RoiResponseSeriesDataArray] = Field(None) + + +class RoiResponseSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_ROIs: Optional[float] = Field(None) + + +class RoiResponseSeriesRois(DynamicTableRegion): + """ + DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. 
+ """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DfOverF(NWBDataInterface): + """ + dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). + """ + RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing dF/F for a ROI.""") + + +class Fluorescence(NWBDataInterface): + """ + Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes). + """ + RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing fluorescence data for a ROI.""") + + +class ImageSegmentation(NWBDataInterface): + """ + Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. + """ + PlaneSegmentation: List[PlaneSegmentation] = Field(default_factory=list, description="""Results from image segmentation of a specific imaging plane.""") + + +class PlaneSegmentation(DynamicTable): + """ + Results from image segmentation of a specific imaging plane. + """ + image_mask: Optional[PlaneSegmentationImageMask] = Field(None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""") + pixel_mask_index: Optional[PlaneSegmentationPixelMaskIndex] = Field(None, description="""Index into pixel_mask.""") + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""") + voxel_mask_index: Optional[PlaneSegmentationVoxelMaskIndex] = Field(None, description="""Index into voxel_mask.""") + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""") + reference_images: PlaneSegmentationReferenceImages = Field(..., description="""Image stacks that the segmentation masks apply to.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class PlaneSegmentationImageMask(VectorData): + """ + ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero. + """ + array: Optional[PlaneSegmentationImageMaskArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationImageMaskArray(Arraylike): + + num_roi: Any = Field(...) + num_x: Any = Field(...) + num_y: Any = Field(...) + num_z: Optional[Any] = Field(None) + + +class PlaneSegmentationPixelMaskIndex(VectorIndex): + """ + Index into pixel_mask. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class PlaneSegmentationVoxelMaskIndex(VectorIndex): + """ + Index into voxel_mask. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class PlaneSegmentationReferenceImages(ConfiguredBaseModel): + """ + Image stacks that the segmentation masks apply to. + """ + ImageSeries: Optional[List[ImageSeries]] = Field(default_factory=list, description="""One or more image stacks that the masks apply to (can be one-element stack).""") + + +class ImagingPlane(NWBContainer): + """ + An imaging plane and its metadata. + """ + description: Optional[ImagingPlaneDescription] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: ImagingPlaneExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[ImagingPlaneImagingRate] = Field(None, description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""") + indicator: ImagingPlaneIndicator = Field(..., description="""Calcium indicator.""") + location: ImagingPlaneLocation = Field(..., description="""Location of the imaging plane. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + manifold: Optional[ImagingPlaneManifold] = Field(None, description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""") + origin_coords: Optional[ImagingPlaneOriginCoords] = Field(None, description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""") + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(None, description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""") + reference_frame: Optional[ImagingPlaneReferenceFrame] = Field(None, description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""") + OpticalChannel: List[OpticalChannel] = Field(default_factory=list, description="""An optical channel used to record from an imaging plane.""") + + +class ImagingPlaneDescription(ConfiguredBaseModel): + """ + Description of the imaging plane. + """ + None + + +class ImagingPlaneExcitationLambda(ConfiguredBaseModel): + """ + Excitation wavelength, in nm. + """ + None + + +class ImagingPlaneImagingRate(ConfiguredBaseModel): + """ + Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. + """ + None + + +class ImagingPlaneIndicator(ConfiguredBaseModel): + """ + Calcium indicator. + """ + None + + +class ImagingPlaneLocation(ConfiguredBaseModel): + """ + Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. 
+ """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'.""") + array: Optional[ImagingPlaneManifoldArray] = Field(None) + + +class ImagingPlaneManifoldArray(Arraylike): + + height: float = Field(...) + width: float = Field(...) + x_y_z: float = Field(...) + depth: Optional[float] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + unit: Optional[str] = Field(None, description="""Measurement units for origin_coords. The default value is 'meters'.""") + array: Optional[ImagingPlaneOriginCoordsArray] = Field(None) + + +class ImagingPlaneOriginCoordsArray(Arraylike): + + x_y: Optional[float] = Field(None) + x_y_z: Optional[float] = Field(None) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + unit: Optional[str] = Field(None, description="""Measurement units for grid_spacing. The default value is 'meters'.""") + array: Optional[ImagingPlaneGridSpacingArray] = Field(None) + + +class ImagingPlaneGridSpacingArray(Arraylike): + + x_y: Optional[float] = Field(None) + x_y_z: Optional[float] = Field(None) + + +class ImagingPlaneReferenceFrame(ConfiguredBaseModel): + """ + Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\" + """ + None + + +class OpticalChannel(NWBContainer): + """ + An optical channel used to record from an imaging plane. 
+ """ + description: OpticalChannelDescription = Field(..., description="""Description or other notes about the channel.""") + emission_lambda: OpticalChannelEmissionLambda = Field(..., description="""Emission wavelength for channel, in nm.""") + + +class OpticalChannelDescription(ConfiguredBaseModel): + """ + Description or other notes about the channel. + """ + None + + +class OpticalChannelEmissionLambda(ConfiguredBaseModel): + """ + Emission wavelength for channel, in nm. + """ + None + + +class MotionCorrection(NWBDataInterface): + """ + An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions). + """ + CorrectedImageStack: List[CorrectedImageStack] = Field(default_factory=list, description="""Reuslts from motion correction of an image stack.""") + + +class CorrectedImageStack(NWBDataInterface): + """ + Reuslts from motion correction of an image stack. + """ + corrected: CorrectedImageStackCorrected = Field(..., description="""Image stack with frames shifted to the common coordinates.""") + xy_translation: CorrectedImageStackXyTranslation = Field(..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""") + + +class CorrectedImageStackXyTranslation(TimeSeries): + """ + Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. 
The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class GrayscaleImage(Image): + """ + A grayscale image. + """ + array: Optional[GrayscaleImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class GrayscaleImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + + +class RGBImage(Image): + """ + A color image. + """ + array: Optional[RGBImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b: Optional[float] = Field(None) + + +class RGBAImage(Image): + """ + A color image with transparency. + """ + array: Optional[RGBAImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBAImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageSeries(TimeSeries): + """ + General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OnePhotonSeries(ImageSeries): + """ + Image stack recorded over time from 1-photon microscope. + """ + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""") + exposure_time: Optional[float] = Field(None, description="""Exposure time of the sample; often the inverse of the frequency.""") + binning: Optional[int] = Field(None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""") + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + intensity: Optional[float] = Field(None, description="""Intensity of the excitation in mW/mm^2, if known.""") + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TwoPhotonSeries(ImageSeries): + """ + Image stack recorded over time from 2-photon microscope. + """ + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""") + field_of_view: Optional[TwoPhotonSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. 
If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CorrectedImageStackCorrected(ImageSeries): + """ + Image stack with frames shifted to the common coordinates. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + array: Optional[ImageSeriesDataArray] = Field(None) + + +class ImageSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + z: Optional[float] = Field(None) + + +class ImageSeriesDimension(ConfiguredBaseModel): + """ + Number of pixels on x, y, (and z) axes. + """ + array: Optional[ImageSeriesDimensionArray] = Field(None) + + +class ImageSeriesDimensionArray(Arraylike): + + rank: int = Field(...) + + +class ImageSeriesExternalFile(ConfiguredBaseModel): + """ + Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + """ + starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). 
For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""") + array: Optional[ImageSeriesExternalFileArray] = Field(None) + + +class ImageSeriesExternalFileArray(Arraylike): + + num_files: str = Field(...) + + +class ImageSeriesFormat(ConfiguredBaseModel): + """ + Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + """ + None + + +class ImageMaskSeries(ImageSeries): + """ + An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OpticalSeries(ImageSeries): + """ + Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or what the area of the target being imaged is). If the OpticalSeries represents acquired imaging data, orientation is also important. + """ + distance: Optional[OpticalSeriesDistance] = Field(None, description="""Distance from camera/monitor to target/eye.""") + field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""") + orientation: Optional[OpticalSeriesOrientation] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OpticalSeriesDistance(ConfiguredBaseModel): + """ + Distance from camera/monitor to target/eye. + """ + None + + +class OpticalSeriesFieldOfView(ConfiguredBaseModel): + """ + Width, height and depth of image, or imaged area, in meters. + """ + array: Optional[OpticalSeriesFieldOfViewArray] = Field(None) + + +class OpticalSeriesFieldOfViewArray(Arraylike): + + width_height: Optional[float] = Field(None) + width_height_depth: Optional[float] = Field(None) + + +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + array: Optional[OpticalSeriesDataArray] = Field(None) + + +class OpticalSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + + +class OpticalSeriesOrientation(ConfiguredBaseModel): + """ + Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. + """ + None + + +class IndexSeries(TimeSeries): + """ + Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. + """ + data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""") + array: Optional[IndexSeriesDataArray] = Field(None) + + +class IndexSeriesDataArray(Arraylike): + + num_times: int = Field(...) + + +class OptogeneticSeries(TimeSeries): + """ + An optogenetic stimulus. + """ + data: OptogeneticSeriesData = Field(..., description="""Applied power for optogenetic stimulus, in watts.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for data, which is fixed to 'watts'.""") + array: Optional[OptogeneticSeriesDataArray] = Field(None) + + +class OptogeneticSeriesDataArray(Arraylike): + + num_times: float = Field(...) + + +class OptogeneticStimulusSite(NWBContainer): + """ + A site of optogenetic stimulation. + """ + description: OptogeneticStimulusSiteDescription = Field(..., description="""Description of stimulation site.""") + excitation_lambda: OptogeneticStimulusSiteExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""") + location: OptogeneticStimulusSiteLocation = Field(..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + + +class OptogeneticStimulusSiteDescription(ConfiguredBaseModel): + """ + Description of stimulation site. + """ + None + + +class OptogeneticStimulusSiteExcitationLambda(ConfiguredBaseModel): + """ + Excitation wavelength, in nm. + """ + None + + +class OptogeneticStimulusSiteLocation(ConfiguredBaseModel): + """ + Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class PatchClampSeries(TimeSeries): + """ + An abstract base class for patch-clamp data - stimulus or response, current or voltage. 
+ """ + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class PatchClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage or current. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + array: Optional[PatchClampSeriesDataArray] = Field(None) + + +class PatchClampSeriesDataArray(Arraylike): + + num_times: float = Field(...) + + +class PatchClampSeriesGain(ConfiguredBaseModel): + """ + Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + """ + None + + +class CurrentClampSeries(PatchClampSeries): + """ + Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. 
+ """ + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") + bias_current: Optional[CurrentClampSeriesBiasCurrent] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[CurrentClampSeriesBridgeBalance] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[CurrentClampSeriesCapacitanceCompensation] = Field(None, description="""Capacitance compensation, in farads.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CurrentClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class CurrentClampSeriesBiasCurrent(ConfiguredBaseModel): + """ + Bias current, in amps. + """ + None + + +class CurrentClampSeriesBridgeBalance(ConfiguredBaseModel): + """ + Bridge balance, in ohms. + """ + None + + +class CurrentClampSeriesCapacitanceCompensation(ConfiguredBaseModel): + """ + Capacitance compensation, in farads. 
+ """ + None + + +class IZeroClampSeries(CurrentClampSeries): + """ + Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. + """ + stimulus_description: Optional[str] = Field(None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""") + bias_current: IZeroClampSeriesBiasCurrent = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: IZeroClampSeriesBridgeBalance = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: IZeroClampSeriesCapacitanceCompensation = Field(..., description="""Capacitance compensation, in farads, fixed to 0.0.""") + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IZeroClampSeriesBiasCurrent(ConfiguredBaseModel): + """ + Bias current, in amps, fixed to 0.0. + """ + None + + +class IZeroClampSeriesBridgeBalance(ConfiguredBaseModel): + """ + Bridge balance, in ohms, fixed to 0.0. 
+ """ + None + + +class IZeroClampSeriesCapacitanceCompensation(ConfiguredBaseModel): + """ + Capacitance compensation, in farads, fixed to 0.0. + """ + None + + +class CurrentClampStimulusSeries(PatchClampSeries): + """ + Stimulus current applied during current clamp recording. + """ + data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CurrentClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus current applied. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class VoltageClampSeries(PatchClampSeries): + """ + Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. 
+ """ + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") + capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(None, description="""Fast capacitance, in farads.""") + capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(None, description="""Slow capacitance, in farads.""") + resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(None, description="""Resistance compensation bandwidth, in hertz.""") + resistance_comp_correction: Optional[VoltageClampSeriesResistanceCompCorrection] = Field(None, description="""Resistance compensation correction, in percent.""") + resistance_comp_prediction: Optional[VoltageClampSeriesResistanceCompPrediction] = Field(None, description="""Resistance compensation prediction, in percent.""") + whole_cell_capacitance_comp: Optional[VoltageClampSeriesWholeCellCapacitanceComp] = Field(None, description="""Whole cell capacitance compensation, in farads.""") + whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = Field(None, description="""Whole cell series resistance compensation, in ohms.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
+ Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): + """ + Fast capacitance, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""") + + +class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): + """ + Slow capacitance, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_slow, which is fixed to 'farads'.""") + + +class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): + """ + Resistance compensation bandwidth, in hertz. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""") + + +class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): + """ + Resistance compensation correction, in percent. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""") + + +class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): + """ + Resistance compensation prediction, in percent. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""") + + +class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): + """ + Whole cell capacitance compensation, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""") + + +class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): + """ + Whole cell series resistance compensation, in ohms. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""") + + +class VoltageClampStimulusSeries(PatchClampSeries): + """ + Stimulus voltage applied during a voltage clamp recording. + """ + data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds.
+ When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class VoltageClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus voltage applied. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class IntracellularElectrode(NWBContainer): + """ + An intracellular electrode and its metadata. + """ + cell_id: Optional[IntracellularElectrodeCellId] = Field(None, description="""unique ID of the cell""") + description: IntracellularElectrodeDescription = Field(..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""") + filtering: Optional[IntracellularElectrodeFiltering] = Field(None, description="""Electrode specific filtering.""") + initial_access_resistance: Optional[IntracellularElectrodeInitialAccessResistance] = Field(None, description="""Initial access resistance.""") + location: Optional[IntracellularElectrodeLocation] = Field(None, description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + resistance: Optional[IntracellularElectrodeResistance] = Field(None, description="""Electrode resistance, in ohms.""") + seal: Optional[IntracellularElectrodeSeal] = Field(None, description="""Information about seal used for recording.""") + slice: Optional[IntracellularElectrodeSlice] = Field(None, description="""Information about slice used for recording.""") + + +class IntracellularElectrodeCellId(ConfiguredBaseModel): + """ + unique ID of the cell + """ + None + + +class IntracellularElectrodeDescription(ConfiguredBaseModel): + """ + Description of electrode (e.g., whole-cell, sharp, etc.). + """ + None + + +class IntracellularElectrodeFiltering(ConfiguredBaseModel): + """ + Electrode specific filtering. + """ + None + + +class IntracellularElectrodeInitialAccessResistance(ConfiguredBaseModel): + """ + Initial access resistance.
+ """ + None + + +class IntracellularElectrodeLocation(ConfiguredBaseModel): + """ + Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class IntracellularElectrodeResistance(ConfiguredBaseModel): + """ + Electrode resistance, in ohms. + """ + None + + +class IntracellularElectrodeSeal(ConfiguredBaseModel): + """ + Information about seal used for recording. + """ + None + + +class IntracellularElectrodeSlice(ConfiguredBaseModel): + """ + Information about slice used for recording. + """ + None + + +class SweepTable(DynamicTable): + """ + [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata. + """ + sweep_number: SweepTableSweepNumber = Field(..., description="""Sweep number of the PatchClampSeries in that row.""") + series: SweepTableSeries = Field(..., description="""The PatchClampSeries with the sweep number in that row.""") + series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysSweepTable(SweepTable): + """ + [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata. + """ + sweep_number: SweepTableSweepNumber = Field(..., description="""Sweep number of the PatchClampSeries in that row.""") + series: SweepTableSeries = Field(..., description="""The PatchClampSeries with the sweep number in that row.""") + series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SweepTableSweepNumber(VectorData): + """ + Sweep number of the PatchClampSeries in that row. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class SweepTableSeries(VectorData): + """ + The PatchClampSeries with the sweep number in that row. 
+ """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class SweepTableSeriesIndex(VectorIndex): + """ + Index for series. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class IntracellularElectrodesTable(DynamicTable): + """ + Table for storing intracellular electrode related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + electrode: IntracellularElectrodesTableElectrode = Field(..., description="""Column for storing the reference to the intracellular electrode.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularElectrodesTableElectrode(VectorData): + """ + Column for storing the reference to the intracellular electrode. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularStimuliTable(DynamicTable): + """ + Table for storing intracellular stimulus related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded stimulus for the recording (rows). + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularResponsesTable(DynamicTable): + """ + Table for storing intracellular response related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded response for the recording (rows) + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularRecordingsTable(AlignedDynamicTable): + """ + A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + """ + description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""") + electrodes: IntracellularRecordingsTableElectrodes = Field(..., description="""Table for storing intracellular electrode related metadata.""") + stimuli: IntracellularRecordingsTableStimuli = Field(..., description="""Table for storing intracellular stimulus related metadata.""") + responses: IntracellularRecordingsTableResponses = Field(..., description="""Table for storing intracellular response related metadata.""") + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
+ This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysIntracellularRecordings(IntracellularRecordingsTable): + """ + A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + """ + description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""") + electrodes: IntracellularRecordingsTableElectrodes = Field(..., description="""Table for storing intracellular electrode related metadata.""") + stimuli: IntracellularRecordingsTableStimuli = Field(..., description="""Table for storing intracellular stimulus related metadata.""") + responses: IntracellularRecordingsTableResponses = Field(..., description="""Table for storing intracellular response related metadata.""") + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableElectrodes(IntracellularElectrodesTable): + """ + Table for storing intracellular electrode related metadata.
+ """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + electrode: IntracellularElectrodesTableElectrode = Field(..., description="""Column for storing the reference to the intracellular electrode.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableStimuli(IntracellularStimuliTable): + """ + Table for storing intracellular stimulus related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableResponses(IntracellularResponsesTable): + """ + Table for storing intracellular response related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimultaneousRecordingsTable(DynamicTable): + """ + A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. + """ + recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""") + recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysSimultaneousRecordings(SimultaneousRecordingsTable): + """ + A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes + """ + recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""") + recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimultaneousRecordingsTableRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the IntracellularRecordingsTable table. + """ + table: Optional[IntracellularRecordingsTable] = Field(None, description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class SimultaneousRecordingsTableRecordingsIndex(VectorIndex): + """ + Index dataset for the recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class SequentialRecordingsTable(DynamicTable): + """ + A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + """ + simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""") + simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""") + stimulus_type: SequentialRecordingsTableStimulusType = Field(..., description="""The type of stimulus used for the sequential recording.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
+ This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysSequentialRecordings(SequentialRecordingsTable): + """ + A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + """ + simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""") + simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""") + stimulus_type: SequentialRecordingsTableStimulusType = Field(..., description="""The type of stimulus used for the sequential recording.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SimultaneousRecordingsTable table. + """ + table: Optional[SimultaneousRecordingsTable] = Field(None, description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): + """ + Index dataset for the simultaneous_recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class SequentialRecordingsTableStimulusType(VectorData): + """ + The type of stimulus used for the sequential recording. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class RepetitionsTable(DynamicTable): + """ + A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.
+ """ + sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""") + sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysRepetitions(RepetitionsTable): + """ + A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. + """ + sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""") + sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class RepetitionsTableSequentialRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SequentialRecordingsTable table. + """ + table: Optional[SequentialRecordingsTable] = Field(None, description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class RepetitionsTableSequentialRecordingsIndex(VectorIndex): + """ + Index dataset for the sequential_recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ExperimentalConditionsTable(DynamicTable): + """ + A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. 
+ """ + repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""") + repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class NWBFileGeneralIntracellularEphysExperimentalConditions(ExperimentalConditionsTable): + """ + A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. + """ + repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""") + repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class ExperimentalConditionsTableRepetitions(DynamicTableRegion): + """ + A reference to one or more rows in the RepetitionsTable table. + """ + table: Optional[RepetitionsTable] = Field(None, description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class ExperimentalConditionsTableRepetitionsIndex(VectorIndex): + """ + Index dataset for the repetitions column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElectricalSeries(TimeSeries): + """ + A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels. + """ + filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". 
If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""") + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") + electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""") + channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""") + array: Optional[ElectricalSeriesDataArray] = Field(None) + + +class ElectricalSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_channels: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class ElectricalSeriesElectrodes(DynamicTableRegion): + """ + DynamicTableRegion pointer to the electrodes that this time series was generated from. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class ElectricalSeriesChannelConversion(ConfiguredBaseModel): + """ + Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels. + """ + axis: Optional[int] = Field(None, description="""The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1.""") + array: Optional[ElectricalSeriesChannelConversionArray] = Field(None) + + +class ElectricalSeriesChannelConversionArray(Arraylike): + + num_channels: float = Field(...) + + +class SpikeEventSeries(ElectricalSeries): + """ + Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). + """ + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") + timestamps: SpikeEventSeriesTimestamps = Field(..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""") + filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""") + electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""") + channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for waveforms, which is fixed to 'volts'.""") + array: Optional[SpikeEventSeriesDataArray] = Field(None) + + +class SpikeEventSeriesDataArray(Arraylike): + + num_events: float = Field(...) + num_samples: float = Field(...) + num_channels: Optional[float] = Field(None) + + +class SpikeEventSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here. 
+ """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[SpikeEventSeriesTimestampsArray] = Field(None) + + +class SpikeEventSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class FeatureExtraction(NWBDataInterface): + """ + Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. + """ + description: FeatureExtractionDescription = Field(..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""") + features: FeatureExtractionFeatures = Field(..., description="""Multi-dimensional array of features extracted from each event.""") + times: FeatureExtractionTimes = Field(..., description="""Times of events that features correspond to (can be a link).""") + electrodes: FeatureExtractionElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""") + + +class FeatureExtractionDescription(ConfiguredBaseModel): + """ + Description of features (eg, ''PC1'') for each of the extracted features. + """ + array: Optional[FeatureExtractionDescriptionArray] = Field(None) + + +class FeatureExtractionDescriptionArray(Arraylike): + + num_features: str = Field(...) + + +class FeatureExtractionFeatures(ConfiguredBaseModel): + """ + Multi-dimensional array of features extracted from each event. + """ + array: Optional[FeatureExtractionFeaturesArray] = Field(None) + + +class FeatureExtractionFeaturesArray(Arraylike): + + num_events: Optional[float] = Field(None) + num_channels: Optional[float] = Field(None) + num_features: Optional[float] = Field(None) + + +class FeatureExtractionTimes(ConfiguredBaseModel): + """ + Times of events that features correspond to (can be a link). + """ + array: Optional[FeatureExtractionTimesArray] = Field(None) + + +class FeatureExtractionTimesArray(Arraylike): + + num_events: float = Field(...) + + +class FeatureExtractionElectrodes(DynamicTableRegion): + """ + DynamicTableRegion pointer to the electrodes that this time series was generated from. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class EventDetection(NWBDataInterface): + """ + Detected spike events from voltage trace(s). + """ + detection_method: EventDetectionDetectionMethod = Field(..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""") + source_idx: EventDetectionSourceIdx = Field(..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""") + times: EventDetectionTimes = Field(..., description="""Timestamps of events, in seconds.""") + + +class EventDetectionDetectionMethod(ConfiguredBaseModel): + """ + Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values. 
+ """ + None + + +class EventDetectionSourceIdx(ConfiguredBaseModel): + """ + Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data. + """ + array: Optional[EventDetectionSourceIdxArray] = Field(None) + + +class EventDetectionSourceIdxArray(Arraylike): + + num_events: int = Field(...) + + +class EventDetectionTimes(ConfiguredBaseModel): + """ + Timestamps of events, in seconds. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for event times, which is fixed to 'seconds'.""") + array: Optional[EventDetectionTimesArray] = Field(None) + + +class EventDetectionTimesArray(Arraylike): + + num_events: float = Field(...) + + +class EventWaveform(NWBDataInterface): + """ + Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. + """ + SpikeEventSeries: Optional[List[SpikeEventSeries]] = Field(default_factory=list, description="""SpikeEventSeries object(s) containing detected spike event waveforms.""") + + +class FilteredEphys(NWBDataInterface): + """ + Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + """ + ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing filtered electrophysiology data.""") + + +class LFP(NWBDataInterface): + """ + LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + """ + ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing LFP data for one or more channels.""") + + +class ElectrodeGroup(NWBContainer): + """ + A physical grouping of electrodes, e.g. a shank of an array. + """ + description: Optional[str] = Field(None, description="""Description of this electrode group.""") + location: Optional[str] = Field(None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""") + position: Optional[ElectrodeGroupPosition] = Field(None, description="""stereotaxic or common framework coordinates""") + + +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + None + + +class ClusterWaveforms(NWBDataInterface): + """ + DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. + """ + waveform_filtering: ClusterWaveformsWaveformFiltering = Field(..., description="""Filtering applied to data before generating mean/sd""") + waveform_mean: ClusterWaveformsWaveformMean = Field(..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""") + waveform_sd: ClusterWaveformsWaveformSd = Field(..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""") + + +class ClusterWaveformsWaveformFiltering(ConfiguredBaseModel): + """ + Filtering applied to data before generating mean/sd + """ + None + + +class ClusterWaveformsWaveformMean(ConfiguredBaseModel): + """ + The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled) + """ + array: Optional[ClusterWaveformsWaveformMeanArray] = Field(None) + + +class ClusterWaveformsWaveformMeanArray(Arraylike): + + num_clusters: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class ClusterWaveformsWaveformSd(ConfiguredBaseModel): + """ + Stdev of waveforms for each cluster, using the same indices as in mean + """ + array: Optional[ClusterWaveformsWaveformSdArray] = Field(None) + + +class ClusterWaveformsWaveformSdArray(Arraylike): + + num_clusters: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class Clustering(NWBDataInterface): + """ + DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting. + """ + description: ClusteringDescription = Field(..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""") + num: ClusteringNum = Field(..., description="""Cluster number of each event""") + peak_over_rms: ClusteringPeakOverRms = Field(..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""") + times: ClusteringTimes = Field(..., description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""") + + +class ClusteringDescription(ConfiguredBaseModel): + """ + Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc) + """ + None + + +class ClusteringNum(ConfiguredBaseModel): + """ + Cluster number of each event + """ + array: Optional[ClusteringNumArray] = Field(None) + + +class ClusteringNumArray(Arraylike): + + num_events: int = Field(...) + + +class ClusteringPeakOverRms(ConfiguredBaseModel): + """ + Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric). + """ + array: Optional[ClusteringPeakOverRmsArray] = Field(None) + + +class ClusteringPeakOverRmsArray(Arraylike): + + num_clusters: float = Field(...) + + +class ClusteringTimes(ConfiguredBaseModel): + """ + Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module. + """ + array: Optional[ClusteringTimesArray] = Field(None) + + +class ClusteringTimesArray(Arraylike): + + num_events: float = Field(...) + + +class Device(NWBContainer): + """ + Metadata about a data acquisition device, e.g., recording system, electrode, microscope. + """ + description: Optional[str] = Field(None, description="""Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.""") + manufacturer: Optional[str] = Field(None, description="""The name of the manufacturer of the device.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +NWBFileFileCreateDate.update_forward_refs() +NWBFileIdentifier.update_forward_refs() +NWBFileSessionDescription.update_forward_refs() +NWBFileSessionStartTime.update_forward_refs() +NWBFileTimestampsReferenceTime.update_forward_refs() +NWBFileAcquisition.update_forward_refs() +NWBFileAnalysis.update_forward_refs() +NWBFileScratch.update_forward_refs() +NWBFileProcessing.update_forward_refs() +NWBFileStimulus.update_forward_refs() +NWBFileStimulusPresentation.update_forward_refs() +NWBFileStimulusTemplates.update_forward_refs() +NWBFileGeneral.update_forward_refs() +NWBFileGeneralDataCollection.update_forward_refs() +NWBFileGeneralExperimentDescription.update_forward_refs() +NWBFileGeneralExperimenter.update_forward_refs() +NWBFileGeneralInstitution.update_forward_refs() +NWBFileGeneralKeywords.update_forward_refs() +NWBFileGeneralLab.update_forward_refs() +NWBFileGeneralNotes.update_forward_refs() +NWBFileGeneralPharmacology.update_forward_refs() +NWBFileGeneralProtocol.update_forward_refs() +NWBFileGeneralRelatedPublications.update_forward_refs() +NWBFileGeneralSessionId.update_forward_refs() +NWBFileGeneralSlices.update_forward_refs() +NWBFileGeneralSourceScript.update_forward_refs() +NWBFileGeneralStimulus.update_forward_refs() +NWBFileGeneralSurgery.update_forward_refs() +NWBFileGeneralVirus.update_forward_refs() +NWBFileGeneralDevices.update_forward_refs() +NWBFileGeneralExtracellularEphys.update_forward_refs() +NWBFileGeneralIntracellularEphys.update_forward_refs() +NWBFileGeneralIntracellularEphysFiltering.update_forward_refs() +NWBFileGeneralOptogenetics.update_forward_refs() +NWBFileGeneralOptophysiology.update_forward_refs() +NWBFileIntervals.update_forward_refs() +SubjectAge.update_forward_refs() +SubjectDateOfBirth.update_forward_refs() +SubjectDescription.update_forward_refs() +SubjectGenotype.update_forward_refs() +SubjectSex.update_forward_refs() +SubjectSpecies.update_forward_refs() +SubjectStrain.update_forward_refs() +SubjectSubjectId.update_forward_refs() +SubjectWeight.update_forward_refs() +Arraylike.update_forward_refs() 
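# (Editorial aside, not part of the generated file.) These generated models use
# forward and self references between classes, so pydantic v1 leaves each such
# annotation as an unresolved ForwardRef until update_forward_refs() is called
# once the referenced class exists; that is why every class gets touched once in
# this run of calls. A hypothetical two-class sketch of the same pattern
# (Region/Table are illustrative names, not classes from this schema):

from typing import Optional
from pydantic import BaseModel

class Region(BaseModel):
    table: Optional["Table"] = None   # "Table" does not exist yet at this point

class Table(BaseModel):
    description: Optional[str] = None

Region.update_forward_refs()          # resolves "Table" against the module namespace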
+NWBFileFileCreateDateArray.update_forward_refs() +NWBFileGeneralExperimenterArray.update_forward_refs() +NWBFileGeneralKeywordsArray.update_forward_refs() +NWBFileGeneralRelatedPublicationsArray.update_forward_refs() +AbstractFeatureSeriesData.update_forward_refs() +AbstractFeatureSeriesDataArray.update_forward_refs() +AbstractFeatureSeriesFeatureUnits.update_forward_refs() +AbstractFeatureSeriesFeatureUnitsArray.update_forward_refs() +AbstractFeatureSeriesFeatures.update_forward_refs() +AbstractFeatureSeriesFeaturesArray.update_forward_refs() +AnnotationSeriesData.update_forward_refs() +AnnotationSeriesDataArray.update_forward_refs() +IntervalSeriesData.update_forward_refs() +IntervalSeriesDataArray.update_forward_refs() +DecompositionSeriesData.update_forward_refs() +DecompositionSeriesDataArray.update_forward_refs() +DecompositionSeriesMetric.update_forward_refs() +DecompositionSeriesBandsBandLimitsArray.update_forward_refs() +DecompositionSeriesBandsBandMeanArray.update_forward_refs() +DecompositionSeriesBandsBandStdevArray.update_forward_refs() +UnitsObsIntervalsArray.update_forward_refs() +UnitsWaveformMeanArray.update_forward_refs() +UnitsWaveformSdArray.update_forward_refs() +UnitsWaveformsArray.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesX.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesY.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesZ.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesImp.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesLocation.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesFiltering.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesGroup.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesGroupName.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesRelX.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesRelY.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesRelZ.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodesReference.update_forward_refs() +DecompositionSeriesBandsBandName.update_forward_refs() +DecompositionSeriesBandsBandLimits.update_forward_refs() +DecompositionSeriesBandsBandMean.update_forward_refs() +DecompositionSeriesBandsBandStdev.update_forward_refs() +UnitsSpikeTimes.update_forward_refs() +UnitsObsIntervals.update_forward_refs() +UnitsElectrodeGroup.update_forward_refs() +UnitsWaveformMean.update_forward_refs() +UnitsWaveformSd.update_forward_refs() +UnitsWaveforms.update_forward_refs() +VectorIndex.update_forward_refs() +UnitsSpikeTimesIndex.update_forward_refs() +UnitsObsIntervalsIndex.update_forward_refs() +UnitsElectrodesIndex.update_forward_refs() +UnitsWaveformsIndex.update_forward_refs() +UnitsWaveformsIndexIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DecompositionSeriesSourceChannels.update_forward_refs() +UnitsElectrodes.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +NWBFileGeneralExtracellularEphysElectrodes.update_forward_refs() +DecompositionSeriesBands.update_forward_refs() +Units.update_forward_refs() 
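# (Editorial aside, not part of the generated file.) The VectorData / VectorIndex
# docstrings in these models describe the ragged-array convention: VectorIndex
# stores the cumulative end offset of each row into the flat VectorData values,
# so row 0 is values[0:index[0]], row 1 is values[index[0]:index[1]], and so on.
# A plain-Python illustration of that slicing (values and index are made-up data):

values = [1.0, 2.0, 3.0, 4.0, 5.0]   # flat VectorData contents
index = [2, 5]                        # VectorIndex: end offset of each row

rows = [values[(index[i - 1] if i else 0):end] for i, end in enumerate(index)]
assert rows == [[1.0, 2.0], [3.0, 4.0, 5.0]]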
+NWBFileUnits.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +NWBData.update_forward_refs() +ScratchData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +Image.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferences.update_forward_refs() +ImageReferencesArray.update_forward_refs() +NWBContainer.update_forward_refs() +NWBFile.update_forward_refs() +LabMetaData.update_forward_refs() +Subject.update_forward_refs() +NWBFileGeneralSubject.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +AbstractFeatureSeries.update_forward_refs() +AnnotationSeries.update_forward_refs() +IntervalSeries.update_forward_refs() +DecompositionSeries.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +TimeIntervals.update_forward_refs() +NWBFileIntervalsEpochs.update_forward_refs() +NWBFileIntervalsTrials.update_forward_refs() +NWBFileIntervalsInvalidTimes.update_forward_refs() +TimeIntervalsStartTime.update_forward_refs() +TimeIntervalsStopTime.update_forward_refs() +TimeIntervalsTags.update_forward_refs() +TimeIntervalsTagsIndex.update_forward_refs() +TimeIntervalsTimeseries.update_forward_refs() +TimeIntervalsTimeseriesIndex.update_forward_refs() +TwoPhotonSeriesFieldOfView.update_forward_refs() +TwoPhotonSeriesFieldOfViewArray.update_forward_refs() +RoiResponseSeries.update_forward_refs() +RoiResponseSeriesData.update_forward_refs() +RoiResponseSeriesDataArray.update_forward_refs() +RoiResponseSeriesRois.update_forward_refs() +DfOverF.update_forward_refs() +Fluorescence.update_forward_refs() +ImageSegmentation.update_forward_refs() +PlaneSegmentation.update_forward_refs() +PlaneSegmentationImageMask.update_forward_refs() +PlaneSegmentationImageMaskArray.update_forward_refs() +PlaneSegmentationPixelMaskIndex.update_forward_refs() +PlaneSegmentationPixelMask.update_forward_refs() +PlaneSegmentationVoxelMaskIndex.update_forward_refs() +PlaneSegmentationVoxelMask.update_forward_refs() +PlaneSegmentationReferenceImages.update_forward_refs() +ImagingPlane.update_forward_refs() +ImagingPlaneDescription.update_forward_refs() +ImagingPlaneExcitationLambda.update_forward_refs() +ImagingPlaneImagingRate.update_forward_refs() +ImagingPlaneIndicator.update_forward_refs() +ImagingPlaneLocation.update_forward_refs() +ImagingPlaneManifold.update_forward_refs() +ImagingPlaneManifoldArray.update_forward_refs() +ImagingPlaneOriginCoords.update_forward_refs() +ImagingPlaneOriginCoordsArray.update_forward_refs() +ImagingPlaneGridSpacing.update_forward_refs() +ImagingPlaneGridSpacingArray.update_forward_refs() +ImagingPlaneReferenceFrame.update_forward_refs() +OpticalChannel.update_forward_refs() +OpticalChannelDescription.update_forward_refs() +OpticalChannelEmissionLambda.update_forward_refs() +MotionCorrection.update_forward_refs() +CorrectedImageStack.update_forward_refs() +CorrectedImageStackXyTranslation.update_forward_refs() 
+GrayscaleImage.update_forward_refs() +GrayscaleImageArray.update_forward_refs() +RGBImage.update_forward_refs() +RGBImageArray.update_forward_refs() +RGBAImage.update_forward_refs() +RGBAImageArray.update_forward_refs() +ImageSeries.update_forward_refs() +OnePhotonSeries.update_forward_refs() +TwoPhotonSeries.update_forward_refs() +CorrectedImageStackCorrected.update_forward_refs() +ImageSeriesData.update_forward_refs() +ImageSeriesDataArray.update_forward_refs() +ImageSeriesDimension.update_forward_refs() +ImageSeriesDimensionArray.update_forward_refs() +ImageSeriesExternalFile.update_forward_refs() +ImageSeriesExternalFileArray.update_forward_refs() +ImageSeriesFormat.update_forward_refs() +ImageMaskSeries.update_forward_refs() +OpticalSeries.update_forward_refs() +OpticalSeriesDistance.update_forward_refs() +OpticalSeriesFieldOfView.update_forward_refs() +OpticalSeriesFieldOfViewArray.update_forward_refs() +OpticalSeriesData.update_forward_refs() +OpticalSeriesDataArray.update_forward_refs() +OpticalSeriesOrientation.update_forward_refs() +IndexSeries.update_forward_refs() +IndexSeriesData.update_forward_refs() +IndexSeriesDataArray.update_forward_refs() +OptogeneticSeries.update_forward_refs() +OptogeneticSeriesData.update_forward_refs() +OptogeneticSeriesDataArray.update_forward_refs() +OptogeneticStimulusSite.update_forward_refs() +OptogeneticStimulusSiteDescription.update_forward_refs() +OptogeneticStimulusSiteExcitationLambda.update_forward_refs() +OptogeneticStimulusSiteLocation.update_forward_refs() +PatchClampSeries.update_forward_refs() +PatchClampSeriesData.update_forward_refs() +PatchClampSeriesDataArray.update_forward_refs() +PatchClampSeriesGain.update_forward_refs() +CurrentClampSeries.update_forward_refs() +CurrentClampSeriesData.update_forward_refs() +CurrentClampSeriesBiasCurrent.update_forward_refs() +CurrentClampSeriesBridgeBalance.update_forward_refs() +CurrentClampSeriesCapacitanceCompensation.update_forward_refs() +IZeroClampSeries.update_forward_refs() +IZeroClampSeriesBiasCurrent.update_forward_refs() +IZeroClampSeriesBridgeBalance.update_forward_refs() +IZeroClampSeriesCapacitanceCompensation.update_forward_refs() +CurrentClampStimulusSeries.update_forward_refs() +CurrentClampStimulusSeriesData.update_forward_refs() +VoltageClampSeries.update_forward_refs() +VoltageClampSeriesData.update_forward_refs() +VoltageClampSeriesCapacitanceFast.update_forward_refs() +VoltageClampSeriesCapacitanceSlow.update_forward_refs() +VoltageClampSeriesResistanceCompBandwidth.update_forward_refs() +VoltageClampSeriesResistanceCompCorrection.update_forward_refs() +VoltageClampSeriesResistanceCompPrediction.update_forward_refs() +VoltageClampSeriesWholeCellCapacitanceComp.update_forward_refs() +VoltageClampSeriesWholeCellSeriesResistanceComp.update_forward_refs() +VoltageClampStimulusSeries.update_forward_refs() +VoltageClampStimulusSeriesData.update_forward_refs() +IntracellularElectrode.update_forward_refs() +IntracellularElectrodeCellId.update_forward_refs() +IntracellularElectrodeDescription.update_forward_refs() +IntracellularElectrodeFiltering.update_forward_refs() +IntracellularElectrodeInitialAccessResistance.update_forward_refs() +IntracellularElectrodeLocation.update_forward_refs() +IntracellularElectrodeResistance.update_forward_refs() +IntracellularElectrodeSeal.update_forward_refs() +IntracellularElectrodeSlice.update_forward_refs() +SweepTable.update_forward_refs() +NWBFileGeneralIntracellularEphysSweepTable.update_forward_refs() 
+SweepTableSweepNumber.update_forward_refs() +SweepTableSeries.update_forward_refs() +SweepTableSeriesIndex.update_forward_refs() +IntracellularElectrodesTable.update_forward_refs() +IntracellularElectrodesTableElectrode.update_forward_refs() +IntracellularStimuliTable.update_forward_refs() +IntracellularStimuliTableStimulus.update_forward_refs() +IntracellularResponsesTable.update_forward_refs() +IntracellularResponsesTableResponse.update_forward_refs() +IntracellularRecordingsTable.update_forward_refs() +NWBFileGeneralIntracellularEphysIntracellularRecordings.update_forward_refs() +IntracellularRecordingsTableElectrodes.update_forward_refs() +IntracellularRecordingsTableStimuli.update_forward_refs() +IntracellularRecordingsTableResponses.update_forward_refs() +SimultaneousRecordingsTable.update_forward_refs() +NWBFileGeneralIntracellularEphysSimultaneousRecordings.update_forward_refs() +SimultaneousRecordingsTableRecordings.update_forward_refs() +SimultaneousRecordingsTableRecordingsIndex.update_forward_refs() +SequentialRecordingsTable.update_forward_refs() +NWBFileGeneralIntracellularEphysSequentialRecordings.update_forward_refs() +SequentialRecordingsTableSimultaneousRecordings.update_forward_refs() +SequentialRecordingsTableSimultaneousRecordingsIndex.update_forward_refs() +SequentialRecordingsTableStimulusType.update_forward_refs() +RepetitionsTable.update_forward_refs() +NWBFileGeneralIntracellularEphysRepetitions.update_forward_refs() +RepetitionsTableSequentialRecordings.update_forward_refs() +RepetitionsTableSequentialRecordingsIndex.update_forward_refs() +ExperimentalConditionsTable.update_forward_refs() +NWBFileGeneralIntracellularEphysExperimentalConditions.update_forward_refs() +ExperimentalConditionsTableRepetitions.update_forward_refs() +ExperimentalConditionsTableRepetitionsIndex.update_forward_refs() +ElectricalSeries.update_forward_refs() +ElectricalSeriesData.update_forward_refs() +ElectricalSeriesDataArray.update_forward_refs() +ElectricalSeriesElectrodes.update_forward_refs() +ElectricalSeriesChannelConversion.update_forward_refs() +ElectricalSeriesChannelConversionArray.update_forward_refs() +SpikeEventSeries.update_forward_refs() +SpikeEventSeriesData.update_forward_refs() +SpikeEventSeriesDataArray.update_forward_refs() +SpikeEventSeriesTimestamps.update_forward_refs() +SpikeEventSeriesTimestampsArray.update_forward_refs() +FeatureExtraction.update_forward_refs() +FeatureExtractionDescription.update_forward_refs() +FeatureExtractionDescriptionArray.update_forward_refs() +FeatureExtractionFeatures.update_forward_refs() +FeatureExtractionFeaturesArray.update_forward_refs() +FeatureExtractionTimes.update_forward_refs() +FeatureExtractionTimesArray.update_forward_refs() +FeatureExtractionElectrodes.update_forward_refs() +EventDetection.update_forward_refs() +EventDetectionDetectionMethod.update_forward_refs() +EventDetectionSourceIdx.update_forward_refs() +EventDetectionSourceIdxArray.update_forward_refs() +EventDetectionTimes.update_forward_refs() +EventDetectionTimesArray.update_forward_refs() +EventWaveform.update_forward_refs() +FilteredEphys.update_forward_refs() +LFP.update_forward_refs() +ElectrodeGroup.update_forward_refs() +ElectrodeGroupPosition.update_forward_refs() +ClusterWaveforms.update_forward_refs() +ClusterWaveformsWaveformFiltering.update_forward_refs() +ClusterWaveformsWaveformMean.update_forward_refs() +ClusterWaveformsWaveformMeanArray.update_forward_refs() +ClusterWaveformsWaveformSd.update_forward_refs() 
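# (Editorial aside, not part of the generated file.) A minimal sketch of how this
# hand-written enumeration could be collapsed, assuming pydantic v1 semantics:
# resolve forward refs for every model class found in the module rather than
# naming each one. update_all_forward_refs is a hypothetical helper, not part of
# nwb_linkml.

import sys
import inspect
from pydantic import BaseModel

def update_all_forward_refs(module_name: str) -> None:
    """Call update_forward_refs() on every pydantic model defined in a module."""
    module = sys.modules[module_name]
    for _, obj in inspect.getmembers(module, inspect.isclass):
        if issubclass(obj, BaseModel) and obj.__module__ == module_name:
            obj.update_forward_refs()

# e.g. update_all_forward_refs(__name__) at the end of each generated module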
+ClusterWaveformsWaveformSdArray.update_forward_refs() +Clustering.update_forward_refs() +ClusteringDescription.update_forward_refs() +ClusteringNum.update_forward_refs() +ClusteringNumArray.update_forward_refs() +ClusteringPeakOverRms.update_forward_refs() +ClusteringPeakOverRmsArray.update_forward_refs() +ClusteringTimes.update_forward_refs() +ClusteringTimesArray.update_forward_refs() +Device.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.icephys.py b/nwb_linkml/models/core.nwb.icephys.py new file mode 100644 index 0000000..68885cc --- /dev/null +++ b/nwb_linkml/models/core.nwb.icephys.py @@ -0,0 +1,1069 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class PatchClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage or current. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + array: Optional[PatchClampSeriesDataArray] = Field(None) + + +class PatchClampSeriesGain(ConfiguredBaseModel): + """ + Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + """ + None + + +class CurrentClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class CurrentClampSeriesBiasCurrent(ConfiguredBaseModel): + """ + Bias current, in amps. + """ + None + + +class CurrentClampSeriesBridgeBalance(ConfiguredBaseModel): + """ + Bridge balance, in ohms. + """ + None + + +class CurrentClampSeriesCapacitanceCompensation(ConfiguredBaseModel): + """ + Capacitance compensation, in farads. + """ + None + + +class IZeroClampSeriesBiasCurrent(ConfiguredBaseModel): + """ + Bias current, in amps, fixed to 0.0. + """ + None + + +class IZeroClampSeriesBridgeBalance(ConfiguredBaseModel): + """ + Bridge balance, in ohms, fixed to 0.0. + """ + None + + +class IZeroClampSeriesCapacitanceCompensation(ConfiguredBaseModel): + """ + Capacitance compensation, in farads, fixed to 0.0. 
+ """ + None + + +class CurrentClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus current applied. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): + """ + Fast capacitance, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""") + + +class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): + """ + Slow capacitance, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""") + + +class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): + """ + Resistance compensation bandwidth, in hertz. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""") + + +class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): + """ + Resistance compensation correction, in percent. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""") + + +class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): + """ + Resistance compensation prediction, in percent. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""") + + +class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): + """ + Whole cell capacitance compensation, in farads. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""") + + +class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): + """ + Whole cell series resistance compensation, in ohms. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""") + + +class VoltageClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus voltage applied. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + + +class IntracellularElectrodeCellId(ConfiguredBaseModel): + """ + unique ID of the cell + """ + None + + +class IntracellularElectrodeDescription(ConfiguredBaseModel): + """ + Description of electrode (e.g., whole-cell, sharp, etc.). + """ + None + + +class IntracellularElectrodeFiltering(ConfiguredBaseModel): + """ + Electrode specific filtering. + """ + None + + +class IntracellularElectrodeInitialAccessResistance(ConfiguredBaseModel): + """ + Initial access resistance. 
+ """ + None + + +class IntracellularElectrodeLocation(ConfiguredBaseModel): + """ + Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class IntracellularElectrodeResistance(ConfiguredBaseModel): + """ + Electrode resistance, in ohms. + """ + None + + +class IntracellularElectrodeSeal(ConfiguredBaseModel): + """ + Information about seal used for recording. + """ + None + + +class IntracellularElectrodeSlice(ConfiguredBaseModel): + """ + Information about slice used for recording. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class PatchClampSeriesDataArray(Arraylike): + + num_times: float = Field(...) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class SweepTableSweepNumber(VectorData): + """ + Sweep number of the PatchClampSeries in that row. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class SweepTableSeries(VectorData): + """ + The PatchClampSeries with the sweep number in that row. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularElectrodesTableElectrode(VectorData): + """ + Column for storing the reference to the intracellular electrode. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class SequentialRecordingsTableStimulusType(VectorData): + """ + The type of stimulus used for the sequential recording. 
+ """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class SweepTableSeriesIndex(VectorIndex): + """ + Index for series. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class SimultaneousRecordingsTableRecordingsIndex(VectorIndex): + """ + Index dataset for the recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): + """ + Index dataset for the simultaneous_recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class RepetitionsTableSequentialRecordingsIndex(VectorIndex): + """ + Index dataset for the sequential_recordings column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ExperimentalConditionsTableRepetitionsIndex(VectorIndex): + """ + Index dataset for the repetitions column. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. 
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class SimultaneousRecordingsTableRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the IntracellularRecordingsTable table. + """ + table: Optional[IntracellularRecordingsTable] = Field(None, description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SimultaneousRecordingsTable table. + """ + table: Optional[SimultaneousRecordingsTable] = Field(None, description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class RepetitionsTableSequentialRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SequentialRecordingsTable table. + """ + table: Optional[SequentialRecordingsTable] = Field(None, description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class ExperimentalConditionsTableRepetitions(DynamicTableRegion): + """ + A reference to one or more rows in the RepetitionsTable table. + """ + table: Optional[RepetitionsTable] = Field(None, description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. 
Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SweepTable(DynamicTable): + """ + [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata. + """ + sweep_number: SweepTableSweepNumber = Field(..., description="""Sweep number of the PatchClampSeries in that row.""") + series: SweepTableSeries = Field(..., description="""The PatchClampSeries with the sweep number in that row.""") + series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularElectrodesTable(DynamicTable): + """ + Table for storing intracellular electrode related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + electrode: IntracellularElectrodesTableElectrode = Field(..., description="""Column for storing the reference to the intracellular electrode.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularStimuliTable(DynamicTable): + """ + Table for storing intracellular stimulus related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularResponsesTable(DynamicTable): + """ + Table for storing intracellular response related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableElectrodes(IntracellularElectrodesTable): + """ + Table for storing intracellular electrode related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + electrode: IntracellularElectrodesTableElectrode = Field(..., description="""Column for storing the reference to the intracellular electrode.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableStimuli(IntracellularStimuliTable): + """ + Table for storing intracellular stimulus related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTableResponses(IntracellularResponsesTable): + """ + Table for storing intracellular response related metadata. + """ + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimultaneousRecordingsTable(DynamicTable): + """ + A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. + """ + recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""") + recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SequentialRecordingsTable(DynamicTable): + """ + A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + """ + simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""") + simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""") + stimulus_type: SequentialRecordingsTableStimulusType = Field(..., description="""The type of stimulus used for the sequential recording.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class RepetitionsTable(DynamicTable): + """ + A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. + """ + sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""") + sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class ExperimentalConditionsTable(DynamicTable): + """ + A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. + """ + repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""") + repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. 
This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class IntracellularRecordingsTable(AlignedDynamicTable): + """ + A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + """ + description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""") + electrodes: IntracellularRecordingsTableElectrodes = Field(..., description="""Table for storing intracellular electrode related metadata.""") + stimuli: IntracellularRecordingsTableStimuli = Field(..., description="""Table for storing intracellular stimulus related metadata.""") + responses: IntracellularRecordingsTableResponses = Field(..., description="""Table for storing intracellular response related metadata.""") + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. 
The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded stimulus for the recording (rows). + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded response for the recording (rows) + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class IntracellularElectrode(NWBContainer): + """ + An intracellular electrode and its metadata. 
+ """ + cell_id: Optional[IntracellularElectrodeCellId] = Field(None, description="""unique ID of the cell""") + description: IntracellularElectrodeDescription = Field(..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""") + filtering: Optional[IntracellularElectrodeFiltering] = Field(None, description="""Electrode specific filtering.""") + initial_access_resistance: Optional[IntracellularElectrodeInitialAccessResistance] = Field(None, description="""Initial access resistance.""") + location: Optional[IntracellularElectrodeLocation] = Field(None, description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + resistance: Optional[IntracellularElectrodeResistance] = Field(None, description="""Electrode resistance, in ohms.""") + seal: Optional[IntracellularElectrodeSeal] = Field(None, description="""Information about seal used for recording.""") + slice: Optional[IntracellularElectrodeSlice] = Field(None, description="""Information about slice used for recording.""") + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class PatchClampSeries(TimeSeries): + """ + An abstract base class for patch-clamp data - stimulus or response, current or voltage. + """ + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CurrentClampSeries(PatchClampSeries): + """ + Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. 
+ """ + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") + bias_current: Optional[CurrentClampSeriesBiasCurrent] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[CurrentClampSeriesBridgeBalance] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[CurrentClampSeriesCapacitanceCompensation] = Field(None, description="""Capacitance compensation, in farads.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IZeroClampSeries(CurrentClampSeries): + """ + Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. 
+ """ + stimulus_description: Optional[str] = Field(None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""") + bias_current: IZeroClampSeriesBiasCurrent = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: IZeroClampSeriesBridgeBalance = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: IZeroClampSeriesCapacitanceCompensation = Field(..., description="""Capacitance compensation, in farads, fixed to 0.0.""") + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CurrentClampStimulusSeries(PatchClampSeries): + """ + Stimulus current applied during current clamp recording. 
+ """ + data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class VoltageClampSeries(PatchClampSeries): + """ + Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. 
+ """ + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") + capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(None, description="""Fast capacitance, in farads.""") + capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(None, description="""Slow capacitance, in farads.""") + resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(None, description="""Resistance compensation bandwidth, in hertz.""") + resistance_comp_correction: Optional[VoltageClampSeriesResistanceCompCorrection] = Field(None, description="""Resistance compensation correction, in percent.""") + resistance_comp_prediction: Optional[VoltageClampSeriesResistanceCompPrediction] = Field(None, description="""Resistance compensation prediction, in percent.""") + whole_cell_capacitance_comp: Optional[VoltageClampSeriesWholeCellCapacitanceComp] = Field(None, description="""Whole cell capacitance compensation, in farads.""") + whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = Field(None, description="""Whole cell series resistance compensation, in ohms.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class VoltageClampStimulusSeries(PatchClampSeries): + """ + Stimulus voltage applied during a voltage clamp recording. + """ + data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") + stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""") + sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""") + gain: Optional[PatchClampSeriesGain] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
+ """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. 
+ """ + array: Optional[ImageReferencesArray] = Field(None) + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +PatchClampSeriesData.update_forward_refs() +PatchClampSeriesGain.update_forward_refs() +CurrentClampSeriesData.update_forward_refs() +CurrentClampSeriesBiasCurrent.update_forward_refs() +CurrentClampSeriesBridgeBalance.update_forward_refs() +CurrentClampSeriesCapacitanceCompensation.update_forward_refs() +IZeroClampSeriesBiasCurrent.update_forward_refs() +IZeroClampSeriesBridgeBalance.update_forward_refs() +IZeroClampSeriesCapacitanceCompensation.update_forward_refs() +CurrentClampStimulusSeriesData.update_forward_refs() +VoltageClampSeriesData.update_forward_refs() +VoltageClampSeriesCapacitanceFast.update_forward_refs() +VoltageClampSeriesCapacitanceSlow.update_forward_refs() +VoltageClampSeriesResistanceCompBandwidth.update_forward_refs() +VoltageClampSeriesResistanceCompCorrection.update_forward_refs() +VoltageClampSeriesResistanceCompPrediction.update_forward_refs() +VoltageClampSeriesWholeCellCapacitanceComp.update_forward_refs() +VoltageClampSeriesWholeCellSeriesResistanceComp.update_forward_refs() +VoltageClampStimulusSeriesData.update_forward_refs() +IntracellularElectrodeCellId.update_forward_refs() +IntracellularElectrodeDescription.update_forward_refs() +IntracellularElectrodeFiltering.update_forward_refs() +IntracellularElectrodeInitialAccessResistance.update_forward_refs() +IntracellularElectrodeLocation.update_forward_refs() +IntracellularElectrodeResistance.update_forward_refs() +IntracellularElectrodeSeal.update_forward_refs() +IntracellularElectrodeSlice.update_forward_refs() +Arraylike.update_forward_refs() +PatchClampSeriesDataArray.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +SweepTableSweepNumber.update_forward_refs() +SweepTableSeries.update_forward_refs() +IntracellularElectrodesTableElectrode.update_forward_refs() +SequentialRecordingsTableStimulusType.update_forward_refs() +VectorIndex.update_forward_refs() +SweepTableSeriesIndex.update_forward_refs() +SimultaneousRecordingsTableRecordingsIndex.update_forward_refs() +SequentialRecordingsTableSimultaneousRecordingsIndex.update_forward_refs() +RepetitionsTableSequentialRecordingsIndex.update_forward_refs() +ExperimentalConditionsTableRepetitionsIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +SimultaneousRecordingsTableRecordings.update_forward_refs() +SequentialRecordingsTableSimultaneousRecordings.update_forward_refs() +RepetitionsTableSequentialRecordings.update_forward_refs() +ExperimentalConditionsTableRepetitions.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +SweepTable.update_forward_refs() +IntracellularElectrodesTable.update_forward_refs() +IntracellularStimuliTable.update_forward_refs() +IntracellularResponsesTable.update_forward_refs() +IntracellularRecordingsTableElectrodes.update_forward_refs() +IntracellularRecordingsTableStimuli.update_forward_refs() +IntracellularRecordingsTableResponses.update_forward_refs() +SimultaneousRecordingsTable.update_forward_refs() +SequentialRecordingsTable.update_forward_refs() +RepetitionsTable.update_forward_refs() 
+ExperimentalConditionsTable.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +IntracellularRecordingsTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +NWBData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +IntracellularStimuliTableStimulus.update_forward_refs() +IntracellularResponsesTableResponse.update_forward_refs() +Image.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferences.update_forward_refs() +ImageReferencesArray.update_forward_refs() +NWBContainer.update_forward_refs() +IntracellularElectrode.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +PatchClampSeries.update_forward_refs() +CurrentClampSeries.update_forward_refs() +IZeroClampSeries.update_forward_refs() +CurrentClampStimulusSeries.update_forward_refs() +VoltageClampSeries.update_forward_refs() +VoltageClampStimulusSeries.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.image.py b/nwb_linkml/models/core.nwb.image.py new file mode 100644 index 0000000..adc8afb --- /dev/null +++ b/nwb_linkml/models/core.nwb.image.py @@ -0,0 +1,656 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + array: Optional[ImageSeriesDataArray] = Field(None) + + +class ImageSeriesDimension(ConfiguredBaseModel): + """ + Number of pixels on x, y, (and z) axes. + """ + array: Optional[ImageSeriesDimensionArray] = Field(None) + + +class ImageSeriesExternalFile(ConfiguredBaseModel): + """ + Paths to one or more external file(s). The field is only present if format='external'. 
This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + """ + starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""") + array: Optional[ImageSeriesExternalFileArray] = Field(None) + + +class ImageSeriesFormat(ConfiguredBaseModel): + """ + Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + """ + None + + +class OpticalSeriesDistance(ConfiguredBaseModel): + """ + Distance from camera/monitor to target/eye. + """ + None + + +class OpticalSeriesFieldOfView(ConfiguredBaseModel): + """ + Width, height and depth of image, or imaged area, in meters. + """ + array: Optional[OpticalSeriesFieldOfViewArray] = Field(None) + + +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + array: Optional[OpticalSeriesDataArray] = Field(None) + + +class OpticalSeriesOrientation(ConfiguredBaseModel): + """ + Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. + """ + None + + +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""") + array: Optional[IndexSeriesDataArray] = Field(None) + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
+ """ + None + + +class GrayscaleImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + + +class RGBImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b: Optional[float] = Field(None) + + +class RGBAImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + z: Optional[float] = Field(None) + + +class ImageSeriesDimensionArray(Arraylike): + + rank: int = Field(...) + + +class ImageSeriesExternalFileArray(Arraylike): + + num_files: str = Field(...) + + +class OpticalSeriesFieldOfViewArray(Arraylike): + + width_height: Optional[float] = Field(None) + width_height_depth: Optional[float] = Field(None) + + +class OpticalSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + + +class IndexSeriesDataArray(Arraylike): + + num_times: int = Field(...) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) 
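+
+
+# --- Illustrative sketch (editorial addition, not generated from the NWB schema) ---
+# The 'conversion', 'offset', and 'unit' attributes documented on TimeSeriesData above
+# combine as: value_in_unit = raw_value * conversion + offset.
+# The numbers below reuse the docstring's example (int16 samples spanning a 5 V range
+# through an 8000x gain); the underscore-prefixed names are hypothetical and used only
+# for illustration.
+_example_conversion = 2.5 / 32768 / 8000  # ~9.5367e-9 volts per raw integer unit
+_example_offset = 0.0  # no re-centering needed for a signed, zero-centered signal
+_example_raw_sample = 1000  # a hypothetical raw int16 value from the acquisition system
+_example_volts = _example_raw_sample * _example_conversion + _example_offset  # ~9.54e-6 V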
+ + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class GrayscaleImage(Image): + """ + A grayscale image. + """ + array: Optional[GrayscaleImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBImage(Image): + """ + A color image. + """ + array: Optional[RGBImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBAImage(Image): + """ + A color image with transparency. + """ + array: Optional[RGBAImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. 
An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ImageSeries(TimeSeries): + """ + General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ImageMaskSeries(ImageSeries): + """ + An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OpticalSeries(ImageSeries): + """ + Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or what area of the target is being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. + """ + distance: Optional[OpticalSeriesDistance] = Field(None, description="""Distance from camera/monitor to target/eye.""") + field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""") + orientation: Optional[OpticalSeriesOrientation] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IndexSeries(TimeSeries): + """ + Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. + """ + data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. 
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. 
+ """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +ImageSeriesData.update_forward_refs() +ImageSeriesDimension.update_forward_refs() +ImageSeriesExternalFile.update_forward_refs() +ImageSeriesFormat.update_forward_refs() +OpticalSeriesDistance.update_forward_refs() +OpticalSeriesFieldOfView.update_forward_refs() +OpticalSeriesData.update_forward_refs() +OpticalSeriesOrientation.update_forward_refs() +IndexSeriesData.update_forward_refs() +Arraylike.update_forward_refs() +GrayscaleImageArray.update_forward_refs() +RGBImageArray.update_forward_refs() +RGBAImageArray.update_forward_refs() +ImageSeriesDataArray.update_forward_refs() +ImageSeriesDimensionArray.update_forward_refs() +ImageSeriesExternalFileArray.update_forward_refs() +OpticalSeriesFieldOfViewArray.update_forward_refs() +OpticalSeriesDataArray.update_forward_refs() +IndexSeriesDataArray.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferencesArray.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +NWBData.update_forward_refs() +Image.update_forward_refs() +GrayscaleImage.update_forward_refs() +RGBImage.update_forward_refs() +RGBAImage.update_forward_refs() +ImageReferences.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +VectorData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +VectorIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +NWBContainer.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +ImageSeries.update_forward_refs() +ImageMaskSeries.update_forward_refs() +OpticalSeries.update_forward_refs() +IndexSeries.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +DynamicTable.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.misc.py b/nwb_linkml/models/core.nwb.misc.py new file mode 100644 index 0000000..ef1f38f --- /dev/null +++ b/nwb_linkml/models/core.nwb.misc.py @@ -0,0 +1,831 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class 
WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class AbstractFeatureSeriesData(ConfiguredBaseModel): + """ + Values of each feature at each time. + """ + unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""") + array: Optional[AbstractFeatureSeriesDataArray] = Field(None) + + +class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel): + """ + Units of each feature. + """ + array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None) + + +class AbstractFeatureSeriesFeatures(ConfiguredBaseModel): + """ + Description of the features represented in TimeSeries::data. + """ + array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None) + + +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[AnnotationSeriesDataArray] = Field(None) + + +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""") + array: Optional[IntervalSeriesDataArray] = Field(None) + + +class DecompositionSeriesData(ConfiguredBaseModel): + """ + Data decomposed into frequency bands. + """ + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""") + array: Optional[DecompositionSeriesDataArray] = Field(None) + + +class DecompositionSeriesMetric(ConfiguredBaseModel): + """ + The metric used, e.g. phase, amplitude, power. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. 
This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class AbstractFeatureSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_features: Optional[float] = Field(None) + + +class AbstractFeatureSeriesFeatureUnitsArray(Arraylike): + + num_features: str = Field(...) + + +class AbstractFeatureSeriesFeaturesArray(Arraylike): + + num_features: str = Field(...) + + +class AnnotationSeriesDataArray(Arraylike): + + num_times: str = Field(...) + + +class IntervalSeriesDataArray(Arraylike): + + num_times: int = Field(...) + + +class DecompositionSeriesDataArray(Arraylike): + + num_times: Optional[float] = Field(None) + num_channels: Optional[float] = Field(None) + num_bands: Optional[float] = Field(None) + + +class DecompositionSeriesBandsBandLimitsArray(Arraylike): + + num_bands: Optional[float] = Field(None) + low_high: Optional[float] = Field(None) + + +class DecompositionSeriesBandsBandMeanArray(Arraylike): + + num_bands: float = Field(...) + + +class DecompositionSeriesBandsBandStdevArray(Arraylike): + + num_bands: float = Field(...) + + +class UnitsObsIntervalsArray(Arraylike): + + num_intervals: Optional[float] = Field(None) + start_end: Optional[float] = Field(None)  # renamed: the NWB dim is literally called "start|end", which is not a valid Python identifier + + +class UnitsWaveformMeanArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) + num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformSdArray(Arraylike): + + num_units: float = Field(...) + num_samples: float = Field(...) + num_electrodes: Optional[float] = Field(None) + + +class UnitsWaveformsArray(Arraylike): + + num_waveforms: Optional[float] = Field(None) + num_samples: Optional[float] = Field(None) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandName(VectorData): + """ + Name of the band, e.g. theta. 
+ """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class DecompositionSeriesBandsBandLimits(VectorData): + """ + Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center. + """ + array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandMean(VectorData): + """ + The mean Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class DecompositionSeriesBandsBandStdev(VectorData): + """ + The standard deviation of Gaussian filters, in Hz. + """ + array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimes(VectorData): + """ + Spike times for each unit in seconds. + """ + resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""") + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsObsIntervals(VectorData): + """ + Observation intervals for each unit. + """ + array: Optional[UnitsObsIntervalsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodeGroup(VectorData): + """ + Electrode group that each spike unit came from. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformMeanArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformSdArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. 
The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""") + array: Optional[UnitsWaveformsArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsSpikeTimesIndex(VectorIndex): + """ + Index into the spike_times dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsObsIntervalsIndex(VectorIndex): + """ + Index into the obs_intervals dataset. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsElectrodesIndex(VectorIndex): + """ + Index into electrodes. 
+ """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndex(VectorIndex): + """ + Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class UnitsWaveformsIndexIndex(VectorIndex): + """ + Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DecompositionSeriesSourceChannels(DynamicTableRegion): + """ + DynamicTableRegion pointer to the channels that this decomposition series was generated from. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class UnitsElectrodes(DynamicTableRegion): + """ + Electrode that each spike unit came from, specified using a DynamicTableRegion. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. 
+ """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class DecompositionSeriesBands(DynamicTable): + """ + Table for describing the bands that this series was generated from. There should be one row in this table for each band. + """ + band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""") + band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""") + band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""") + band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class Units(DynamicTable): + """ + Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. + """ + spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""") + spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""") + obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""") + obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""") + electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""") + electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""") + electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""") + waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""") + waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. 
The number of samples for each waveform must be the same.""") + waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""") + waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). 
For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AbstractFeatureSeries(TimeSeries): + """ + Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. + """ + data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""") + feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""") + features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class AnnotationSeries(TimeSeries): + """ + Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. + """ + data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IntervalSeries(TimeSeries): + """ + Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. + """ + data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class DecompositionSeries(TimeSeries): + """ + Spectral analysis of a time series, e.g. of an LFP or a speech signal. + """ + data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""") + metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""") + bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. 
+ """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +AbstractFeatureSeriesData.update_forward_refs() +AbstractFeatureSeriesFeatureUnits.update_forward_refs() +AbstractFeatureSeriesFeatures.update_forward_refs() +AnnotationSeriesData.update_forward_refs() +IntervalSeriesData.update_forward_refs() +DecompositionSeriesData.update_forward_refs() +DecompositionSeriesMetric.update_forward_refs() +Arraylike.update_forward_refs() +AbstractFeatureSeriesDataArray.update_forward_refs() +AbstractFeatureSeriesFeatureUnitsArray.update_forward_refs() +AbstractFeatureSeriesFeaturesArray.update_forward_refs() +AnnotationSeriesDataArray.update_forward_refs() +IntervalSeriesDataArray.update_forward_refs() +DecompositionSeriesDataArray.update_forward_refs() +DecompositionSeriesBandsBandLimitsArray.update_forward_refs() +DecompositionSeriesBandsBandMeanArray.update_forward_refs() +DecompositionSeriesBandsBandStdevArray.update_forward_refs() +UnitsObsIntervalsArray.update_forward_refs() +UnitsWaveformMeanArray.update_forward_refs() +UnitsWaveformSdArray.update_forward_refs() +UnitsWaveformsArray.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +DecompositionSeriesBandsBandName.update_forward_refs() +DecompositionSeriesBandsBandLimits.update_forward_refs() +DecompositionSeriesBandsBandMean.update_forward_refs() +DecompositionSeriesBandsBandStdev.update_forward_refs() +UnitsSpikeTimes.update_forward_refs() +UnitsObsIntervals.update_forward_refs() +UnitsElectrodeGroup.update_forward_refs() +UnitsWaveformMean.update_forward_refs() +UnitsWaveformSd.update_forward_refs() +UnitsWaveforms.update_forward_refs() +VectorIndex.update_forward_refs() +UnitsSpikeTimesIndex.update_forward_refs() +UnitsObsIntervalsIndex.update_forward_refs() +UnitsElectrodesIndex.update_forward_refs() +UnitsWaveformsIndex.update_forward_refs() +UnitsWaveformsIndexIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DecompositionSeriesSourceChannels.update_forward_refs() +UnitsElectrodes.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +DecompositionSeriesBands.update_forward_refs() +Units.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +NWBData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() 
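The run of update_forward_refs() calls here is needed because these generated modules use `from __future__ import annotations` (visible in the module headers later in this diff): every field annotation is stored as a string, and classes can reference types defined later in the file, so pydantic v1 can only resolve those strings once all the classes exist. A minimal standalone sketch of the same pattern, with invented `Table`/`Region` names that are not part of the generated models:

from __future__ import annotations
from typing import Optional
from pydantic import BaseModel

class Table(BaseModel):
    # `Region` is defined further down, so this annotation stays an unresolved
    # forward reference when the class is created
    region: Optional[Region] = None

class Region(BaseModel):
    description: Optional[str] = None

# Without this call, instantiating Table raises a ConfigError asking you to run
# Table.update_forward_refs(); with it, the string annotation is resolved.
Table.update_forward_refs()
print(Table(region=Region(description="a region of another table")))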
+Image.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferences.update_forward_refs() +ImageReferencesArray.update_forward_refs() +NWBContainer.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +AbstractFeatureSeries.update_forward_refs() +AnnotationSeries.update_forward_refs() +IntervalSeries.update_forward_refs() +DecompositionSeries.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.ogen.py b/nwb_linkml/models/core.nwb.ogen.py new file mode 100644 index 0000000..643e45f --- /dev/null +++ b/nwb_linkml/models/core.nwb.ogen.py @@ -0,0 +1,475 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + unit: Optional[str] = Field(None, description="""Unit of measurement for data, which is fixed to 'watts'.""") + array: Optional[OptogeneticSeriesDataArray] = Field(None) + + +class OptogeneticStimulusSiteDescription(ConfiguredBaseModel): + """ + Description of stimulation site. + """ + None + + +class OptogeneticStimulusSiteExcitationLambda(ConfiguredBaseModel): + """ + Excitation wavelength, in nm. + """ + None + + +class OptogeneticStimulusSiteLocation(ConfiguredBaseModel): + """ + Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. 
this class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class OptogeneticSeriesDataArray(Arraylike): + + num_times: float = Field(...) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...)
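    # Illustrative note (not a generated field): the 'conversion' and 'offset'
    # docstrings on TimeSeriesData above define how stored values map to the
    # declared 'unit'. For the int16 example given there, conversion =
    # 2.5 / 32768 / 8000 ≈ 9.5367e-9 volts per count, so a stored sample `raw`
    # is read back as raw * conversion + offset in volts.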
+ num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). 
+ """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
+ """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class OptogeneticStimulusSite(NWBContainer): + """ + A site of optogenetic stimulation. + """ + description: OptogeneticStimulusSiteDescription = Field(..., description="""Description of stimulation site.""") + excitation_lambda: OptogeneticStimulusSiteExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""") + location: OptogeneticStimulusSiteLocation = Field(..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OptogeneticSeries(TimeSeries): + """ + An optogenetic stimulus. + """ + data: OptogeneticSeriesData = Field(..., description="""Applied power for optogenetic stimulus, in watts.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. 
+ """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. 
This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +OptogeneticSeriesData.update_forward_refs() +OptogeneticStimulusSiteDescription.update_forward_refs() +OptogeneticStimulusSiteExcitationLambda.update_forward_refs() +OptogeneticStimulusSiteLocation.update_forward_refs() +Arraylike.update_forward_refs() +OptogeneticSeriesDataArray.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferencesArray.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +NWBData.update_forward_refs() +Image.update_forward_refs() +ImageReferences.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +VectorData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +VectorIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +NWBContainer.update_forward_refs() +OptogeneticStimulusSite.update_forward_refs() +NWBDataInterface.update_forward_refs() +TimeSeries.update_forward_refs() +OptogeneticSeries.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +DynamicTable.update_forward_refs() +AlignedDynamicTable.update_forward_refs() 
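The VectorData and VectorIndex docstrings above describe the ragged-array convention these tables rely on: row i of a ragged column is VectorData[VectorIndex[i-1]:VectorIndex[i]], with an implicit 0 for the first row. A small standalone sketch of that indexing, using plain Python lists in place of the datasets (all names invented for illustration):

vector_data = ["a", "b", "c", "d", "e", "f"]  # every row's values, concatenated
vector_index = [2, 3, 6]                      # end offset of each row within vector_data

def ragged_row(i: int) -> list:
    """Return row i, i.e. vector_data[vector_index[i - 1]:vector_index[i]]."""
    start = vector_index[i - 1] if i > 0 else 0
    return vector_data[start:vector_index[i]]

assert ragged_row(0) == ["a", "b"]        # VectorData[0:VectorIndex[0]]
assert ragged_row(1) == ["c"]             # VectorData[VectorIndex[0]:VectorIndex[1]]
assert ragged_row(2) == ["d", "e", "f"]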
+SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/core.nwb.ophys.py b/nwb_linkml/models/core.nwb.ophys.py new file mode 100644 index 0000000..fba460c --- /dev/null +++ b/nwb_linkml/models/core.nwb.ophys.py @@ -0,0 +1,1053 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class TwoPhotonSeriesFieldOfView(ConfiguredBaseModel): + """ + Width, height and depth of image, or imaged area, in meters. + """ + array: Optional[TwoPhotonSeriesFieldOfViewArray] = Field(None) + + +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + array: Optional[RoiResponseSeriesDataArray] = Field(None) + + +class PlaneSegmentationReferenceImages(ConfiguredBaseModel): + """ + Image stacks that the segmentation masks apply to. + """ + ImageSeries: Optional[List[ImageSeries]] = Field(default_factory=list, description="""One or more image stacks that the masks apply to (can be one-element stack).""") + + +class ImagingPlaneDescription(ConfiguredBaseModel): + """ + Description of the imaging plane. + """ + None + + +class ImagingPlaneExcitationLambda(ConfiguredBaseModel): + """ + Excitation wavelength, in nm. + """ + None + + +class ImagingPlaneImagingRate(ConfiguredBaseModel): + """ + Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. + """ + None + + +class ImagingPlaneIndicator(ConfiguredBaseModel): + """ + Calcium indicator. + """ + None + + +class ImagingPlaneLocation(ConfiguredBaseModel): + """ + Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + """ + None + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'.""") + array: Optional[ImagingPlaneManifoldArray] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + unit: Optional[str] = Field(None, description="""Measurement units for origin_coords. The default value is 'meters'.""") + array: Optional[ImagingPlaneOriginCoordsArray] = Field(None) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + unit: Optional[str] = Field(None, description="""Measurement units for grid_spacing. The default value is 'meters'.""") + array: Optional[ImagingPlaneGridSpacingArray] = Field(None) + + +class ImagingPlaneReferenceFrame(ConfiguredBaseModel): + """ + Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\" + """ + None + + +class OpticalChannelDescription(ConfiguredBaseModel): + """ + Description or other notes about the channel. + """ + None + + +class OpticalChannelEmissionLambda(ConfiguredBaseModel): + """ + Emission wavelength for channel, in nm. + """ + None + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. 
+ """ + None + + +class TwoPhotonSeriesFieldOfViewArray(Arraylike): + + width|height: Optional[float] = Field(None) + width|height|depth: Optional[float] = Field(None) + + +class RoiResponseSeriesDataArray(Arraylike): + + num_times: float = Field(...) + num_ROIs: Optional[float] = Field(None) + + +class PlaneSegmentationImageMaskArray(Arraylike): + + num_roi: Any = Field(...) + num_x: Any = Field(...) + num_y: Any = Field(...) + num_z: Optional[Any] = Field(None) + + +class ImagingPlaneManifoldArray(Arraylike): + + height: float = Field(...) + width: float = Field(...) + x_y_z: float = Field(...) + depth: Optional[float] = Field(None) + + +class ImagingPlaneOriginCoordsArray(Arraylike): + + x_y: Optional[float] = Field(None) + x_y_z: Optional[float] = Field(None) + + +class ImagingPlaneGridSpacingArray(Arraylike): + + x_y: Optional[float] = Field(None) + x_y_z: Optional[float] = Field(None) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class PlaneSegmentationImageMask(VectorData): + """ + ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero. + """ + array: Optional[PlaneSegmentationImageMaskArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. 
An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationPixelMaskIndex(VectorIndex): + """ + Index into pixel_mask. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class PlaneSegmentationVoxelMaskIndex(VectorIndex): + """ + Index into voxel_mask. + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class RoiResponseSeriesRois(DynamicTableRegion): + """ + DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). 
These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class PlaneSegmentation(DynamicTable): + """ + Results from image segmentation of a specific imaging plane. + """ + image_mask: Optional[PlaneSegmentationImageMask] = Field(None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""") + pixel_mask_index: Optional[PlaneSegmentationPixelMaskIndex] = Field(None, description="""Index into pixel_mask.""") + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""") + voxel_mask_index: Optional[PlaneSegmentationVoxelMaskIndex] = Field(None, description="""Index into voxel_mask.""") + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""") + reference_images: PlaneSegmentationReferenceImages = Field(..., description="""Image stacks that the segmentation masks apply to.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + None + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). 
+ """ + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + array: Optional[ImageArray] = Field(None) + + +class ImageArray(Arraylike): + + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class ImageReferencesArray(Arraylike): + + num_images: Image = Field(...) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class ImagingPlane(NWBContainer): + """ + An imaging plane and its metadata. + """ + description: Optional[ImagingPlaneDescription] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: ImagingPlaneExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[ImagingPlaneImagingRate] = Field(None, description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""") + indicator: ImagingPlaneIndicator = Field(..., description="""Calcium indicator.""") + location: ImagingPlaneLocation = Field(..., description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""") + manifold: Optional[ImagingPlaneManifold] = Field(None, description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""") + origin_coords: Optional[ImagingPlaneOriginCoords] = Field(None, description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""") + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(None, description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""") + reference_frame: Optional[ImagingPlaneReferenceFrame] = Field(None, description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. 
For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""") + OpticalChannel: List[OpticalChannel] = Field(default_factory=list, description="""An optical channel used to record from an imaging plane.""") + + +class OpticalChannel(NWBContainer): + """ + An optical channel used to record from an imaging plane. + """ + description: OpticalChannelDescription = Field(..., description="""Description or other notes about the channel.""") + emission_lambda: OpticalChannelEmissionLambda = Field(..., description="""Emission wavelength for channel, in nm.""") + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + None + + +class DfOverF(NWBDataInterface): + """ + dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). + """ + RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing dF/F for a ROI.""") + + +class Fluorescence(NWBDataInterface): + """ + Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (i.e., same names for ROIs and for image planes). + """ + RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing fluorescence data for a ROI.""") + + +class ImageSegmentation(NWBDataInterface): + """ + Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. + """ + PlaneSegmentation: List[PlaneSegmentation] = Field(default_factory=list, description="""Results from image segmentation of a specific imaging plane.""") + + +class MotionCorrection(NWBDataInterface): + """ + An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions). + """ + CorrectedImageStack: List[CorrectedImageStack] = Field(default_factory=list, description="""Results from motion correction of an image stack.""") + + +class CorrectedImageStack(NWBDataInterface): + """ + Results from motion correction of an image stack. 
+ """ + corrected: CorrectedImageStackCorrected = Field(..., description="""Image stack with frames shifted to the common coordinates.""") + xy_translation: CorrectedImageStackXyTranslation = Field(..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""") + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class RoiResponseSeries(TimeSeries): + """ + ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. + """ + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") + rois: RoiResponseSeriesRois = Field(..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CorrectedImageStackXyTranslation(TimeSeries): + """ + Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image. + """ + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""") + offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""") + resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""") + unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""") + continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""") + array: Optional[TimeSeriesDataArray] = Field(None) + + +class TimeSeriesDataArray(Arraylike): + + num_times: Any = Field(...) + num_DIM2: Optional[Any] = Field(None) + num_DIM3: Optional[Any] = Field(None) + num_DIM4: Optional[Any] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""") + + +class TimeSeriesTimestamps(ConfiguredBaseModel): + """ + Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + """ + interval: Optional[int] = Field(None, description="""Value is '1'""") + unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""") + array: Optional[TimeSeriesTimestampsArray] = Field(None) + + +class TimeSeriesTimestampsArray(Arraylike): + + num_times: float = Field(...) + + +class TimeSeriesControl(ConfiguredBaseModel): + """ + Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + """ + array: Optional[TimeSeriesControlArray] = Field(None) + + +class TimeSeriesControlArray(Arraylike): + + num_times: int = Field(...) + + +class TimeSeriesControlDescription(ConfiguredBaseModel): + """ + Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + """ + array: Optional[TimeSeriesControlDescriptionArray] = Field(None) + + +class TimeSeriesControlDescriptionArray(Arraylike): + + num_control_values: str = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + None + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. + """ + description: Optional[str] = Field(None, description="""Description of this collection of processed data.""") + NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""") + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + description: Optional[str] = Field(None, description="""Description of this collection of images.""") + Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""") + order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. 
Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""") + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + """ + array: Optional[ImageReferencesArray] = Field(None) + + +class GrayscaleImage(Image): + """ + A grayscale image. + """ + array: Optional[GrayscaleImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class GrayscaleImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + + +class RGBImage(Image): + """ + A color image. + """ + array: Optional[RGBImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b: Optional[float] = Field(None) + + +class RGBAImage(Image): + """ + A color image with transparency. + """ + array: Optional[RGBAImageArray] = Field(None) + resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""") + description: Optional[str] = Field(None, description="""Description of the image.""") + + +class RGBAImageArray(Arraylike): + + x: Optional[float] = Field(None) + y: Optional[float] = Field(None) + r_g_b_a: Optional[float] = Field(None) + + +class ImageSeries(TimeSeries): + """ + General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OnePhotonSeries(ImageSeries): + """ + Image stack recorded over time from 1-photon microscope. + """ + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""") + exposure_time: Optional[float] = Field(None, description="""Exposure time of the sample; often the inverse of the frequency.""") + binning: Optional[int] = Field(None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""") + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + intensity: Optional[float] = Field(None, description="""Intensity of the excitation in mW/mm^2, if known.""") + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). 
This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class TwoPhotonSeries(ImageSeries): + """ + Image stack recorded over time from 2-photon microscope. + """ + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""") + field_of_view: Optional[TwoPhotonSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. 
This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class CorrectedImageStackCorrected(ImageSeries): + """ + Image stack with frames shifted to the common coordinates. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + array: Optional[ImageSeriesDataArray] = Field(None) + + +class ImageSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + z: Optional[float] = Field(None) + + +class ImageSeriesDimension(ConfiguredBaseModel): + """ + Number of pixels on x, y, (and z) axes. + """ + array: Optional[ImageSeriesDimensionArray] = Field(None) + + +class ImageSeriesDimensionArray(Arraylike): + + rank: int = Field(...) + + +class ImageSeriesExternalFile(ConfiguredBaseModel): + """ + Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + """ + starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. 
Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""") + array: Optional[ImageSeriesExternalFileArray] = Field(None) + + +class ImageSeriesExternalFileArray(Arraylike): + + num_files: str = Field(...) + + +class ImageSeriesFormat(ConfiguredBaseModel): + """ + Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + """ + None + + +class ImageMaskSeries(ImageSeries): + """ + An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. + """ + data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OpticalSeries(ImageSeries): + """ + Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. + """ + distance: Optional[OpticalSeriesDistance] = Field(None, description="""Distance from camera/monitor to target/eye.""") + field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""") + orientation: Optional[OpticalSeriesOrientation] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""") + dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""") + external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""") + format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class OpticalSeriesDistance(ConfiguredBaseModel): + """ + Distance from camera/monitor to target/eye. + """ + None + + +class OpticalSeriesFieldOfView(ConfiguredBaseModel): + """ + Width, height and depth of image, or imaged area, in meters. + """ + array: Optional[OpticalSeriesFieldOfViewArray] = Field(None) + + +class OpticalSeriesFieldOfViewArray(Arraylike): + + width_height: Optional[float] = Field(None) + width_height_depth: Optional[float] = Field(None) + + +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + array: Optional[OpticalSeriesDataArray] = Field(None) + + +class OpticalSeriesDataArray(Arraylike): + + frame: float = Field(...) + x: float = Field(...) + y: float = Field(...) + r_g_b: Optional[float] = Field(None) + + +class OpticalSeriesOrientation(ConfiguredBaseModel): + """ + Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. + """ + None + + +class IndexSeries(TimeSeries): + """ + Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. + """ + data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""") + description: Optional[str] = Field(None, description="""Description of the time series.""") + comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""") + starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""") + timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""") + control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""") + control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""") + sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""") + + +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""") + array: Optional[IndexSeriesDataArray] = Field(None) + + +class IndexSeriesDataArray(Arraylike): + + num_times: int = Field(...) 
+ + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +TwoPhotonSeriesFieldOfView.update_forward_refs() +RoiResponseSeriesData.update_forward_refs() +PlaneSegmentationReferenceImages.update_forward_refs() +ImagingPlaneDescription.update_forward_refs() +ImagingPlaneExcitationLambda.update_forward_refs() +ImagingPlaneImagingRate.update_forward_refs() +ImagingPlaneIndicator.update_forward_refs() +ImagingPlaneLocation.update_forward_refs() +ImagingPlaneManifold.update_forward_refs() +ImagingPlaneOriginCoords.update_forward_refs() +ImagingPlaneGridSpacing.update_forward_refs() +ImagingPlaneReferenceFrame.update_forward_refs() +OpticalChannelDescription.update_forward_refs() +OpticalChannelEmissionLambda.update_forward_refs() +Arraylike.update_forward_refs() +TwoPhotonSeriesFieldOfViewArray.update_forward_refs() +RoiResponseSeriesDataArray.update_forward_refs() +PlaneSegmentationImageMaskArray.update_forward_refs() +ImagingPlaneManifoldArray.update_forward_refs() +ImagingPlaneOriginCoordsArray.update_forward_refs() +ImagingPlaneGridSpacingArray.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +PlaneSegmentationImageMask.update_forward_refs() +PlaneSegmentationPixelMask.update_forward_refs() +PlaneSegmentationVoxelMask.update_forward_refs() +VectorIndex.update_forward_refs() +PlaneSegmentationPixelMaskIndex.update_forward_refs() +PlaneSegmentationVoxelMaskIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +RoiResponseSeriesRois.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +PlaneSegmentation.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +NWBData.update_forward_refs() +TimeSeriesReferenceVectorData.update_forward_refs() +Image.update_forward_refs() +ImageArray.update_forward_refs() +ImageReferences.update_forward_refs() +ImageReferencesArray.update_forward_refs() +NWBContainer.update_forward_refs() +ImagingPlane.update_forward_refs() +OpticalChannel.update_forward_refs() +NWBDataInterface.update_forward_refs() +DfOverF.update_forward_refs() +Fluorescence.update_forward_refs() +ImageSegmentation.update_forward_refs() +MotionCorrection.update_forward_refs() +CorrectedImageStack.update_forward_refs() +TimeSeries.update_forward_refs() +RoiResponseSeries.update_forward_refs() +CorrectedImageStackXyTranslation.update_forward_refs() +TimeSeriesData.update_forward_refs() +TimeSeriesDataArray.update_forward_refs() +TimeSeriesStartingTime.update_forward_refs() +TimeSeriesTimestamps.update_forward_refs() +TimeSeriesTimestampsArray.update_forward_refs() +TimeSeriesControl.update_forward_refs() +TimeSeriesControlArray.update_forward_refs() +TimeSeriesControlDescription.update_forward_refs() +TimeSeriesControlDescriptionArray.update_forward_refs() +TimeSeriesSync.update_forward_refs() +ProcessingModule.update_forward_refs() +Images.update_forward_refs() +ImagesOrderOfImages.update_forward_refs() +GrayscaleImage.update_forward_refs() +GrayscaleImageArray.update_forward_refs() +RGBImage.update_forward_refs() +RGBImageArray.update_forward_refs() +RGBAImage.update_forward_refs() +RGBAImageArray.update_forward_refs() 
+ImageSeries.update_forward_refs() +OnePhotonSeries.update_forward_refs() +TwoPhotonSeries.update_forward_refs() +CorrectedImageStackCorrected.update_forward_refs() +ImageSeriesData.update_forward_refs() +ImageSeriesDataArray.update_forward_refs() +ImageSeriesDimension.update_forward_refs() +ImageSeriesDimensionArray.update_forward_refs() +ImageSeriesExternalFile.update_forward_refs() +ImageSeriesExternalFileArray.update_forward_refs() +ImageSeriesFormat.update_forward_refs() +ImageMaskSeries.update_forward_refs() +OpticalSeries.update_forward_refs() +OpticalSeriesDistance.update_forward_refs() +OpticalSeriesFieldOfView.update_forward_refs() +OpticalSeriesFieldOfViewArray.update_forward_refs() +OpticalSeriesData.update_forward_refs() +OpticalSeriesDataArray.update_forward_refs() +OpticalSeriesOrientation.update_forward_refs() +IndexSeries.update_forward_refs() +IndexSeriesData.update_forward_refs() +IndexSeriesDataArray.update_forward_refs() diff --git a/nwb_linkml/models/hdmf-common.py b/nwb_linkml/models/hdmf-common.py new file mode 100644 index 0000000..3c3df0b --- /dev/null +++ b/nwb_linkml/models/hdmf-common.py @@ -0,0 +1,272 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "1.8.0" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + The column indices. + """ + array: Optional[CSRMatrixIndicesArray] = Field(None) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + The row index pointer. + """ + array: Optional[CSRMatrixIndptrArray] = Field(None) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + array: Optional[CSRMatrixDataArray] = Field(None) + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class CSRMatrixIndicesArray(Arraylike): + + number_of_non_zero_values: int = Field(...) + + +class CSRMatrixIndptrArray(Arraylike): + + number_of_rows_in_the_matrix_plus_1: int = Field(...) 
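The three CSRMatrix component classes above (and the CSRMatrix container defined just below) follow the standard compressed sparse row layout: row i's column indices sit in indices[indptr[i]:indptr[i+1]] and its values in data[indptr[i]:indptr[i+1]]. A minimal sketch with a made-up 3 x 4 matrix, using plain lists in place of the stored datasets:

indptr  = [0, 2, 3, 5]          # row index pointer, length = number of rows + 1
indices = [0, 3, 2, 1, 3]       # column indices of the non-zero values
data    = [10, 20, 30, 40, 50]  # the non-zero values themselves

def csr_row(i):
    # {column: value} for row i of the sparse matrix
    sl = slice(indptr[i], indptr[i + 1])
    return dict(zip(indices[sl], data[sl]))

# csr_row(0) -> {0: 10, 3: 20}; csr_row(2) -> {1: 40, 3: 50}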
+ + +class CSRMatrixDataArray(Arraylike): + + number_of_non_zero_values: Any = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""") + indices: CSRMatrixIndices = Field(..., description="""The column indices.""") + indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""") + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. 
The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. 
This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +CSRMatrixIndices.update_forward_refs() +CSRMatrixIndptr.update_forward_refs() +CSRMatrixData.update_forward_refs() +Arraylike.update_forward_refs() +CSRMatrixIndicesArray.update_forward_refs() +CSRMatrixIndptrArray.update_forward_refs() +CSRMatrixDataArray.update_forward_refs() +Data.update_forward_refs() +Container.update_forward_refs() +CSRMatrix.update_forward_refs() +SimpleMultiContainer.update_forward_refs() +VectorData.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndex.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiers.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTable.update_forward_refs() +DynamicTableId.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +AlignedDynamicTable.update_forward_refs() diff --git a/nwb_linkml/models/hdmf-common.sparse.py b/nwb_linkml/models/hdmf-common.sparse.py new file mode 100644 index 0000000..5b46b17 --- /dev/null +++ b/nwb_linkml/models/hdmf-common.sparse.py @@ -0,0 +1,170 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + 
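Every generated model module opens with the same boilerplate seen just above: a `WeakRefShimBaseModel` that adds a `__weakref__` slot so instances can be weak-referenced, and a `ConfiguredBaseModel` that carries the shared pydantic (v1) settings. A small sketch of what those settings mean in practice; `_Probe` is a hypothetical class defined only for illustration and relies on the `ConfiguredBaseModel` defined above:

```python
import weakref

class _Probe(ConfiguredBaseModel):   # hypothetical, illustration only
    value: int = 0

p = _Probe(value=1)
ref = weakref.ref(p)   # possible because of the __weakref__ slot shim
p.value = 2            # validate_assignment=True re-validates this assignment

try:
    _Probe(value=1, surprise="x")    # extra='forbid' rejects unexpected fields
except Exception as err:
    print(type(err).__name__)        # ValidationError
```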
+class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + The column indices. + """ + array: Optional[CSRMatrixIndicesArray] = Field(None) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + The row index pointer. + """ + array: Optional[CSRMatrixIndptrArray] = Field(None) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + array: Optional[CSRMatrixDataArray] = Field(None) + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class CSRMatrixIndicesArray(Arraylike): + + number_of_non_zero_values: int = Field(...) + + +class CSRMatrixIndptrArray(Arraylike): + + number_of_rows_in_the_matrix_+_1: int = Field(...) + + +class CSRMatrixDataArray(Arraylike): + + number_of_non_zero_values: Any = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""") + indices: CSRMatrixIndices = Field(..., description="""The column indices.""") + indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""") + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. 
+ """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +CSRMatrixIndices.update_forward_refs() +CSRMatrixIndptr.update_forward_refs() +CSRMatrixData.update_forward_refs() +Arraylike.update_forward_refs() +CSRMatrixIndicesArray.update_forward_refs() +CSRMatrixIndptrArray.update_forward_refs() +CSRMatrixDataArray.update_forward_refs() +Data.update_forward_refs() +Container.update_forward_refs() +CSRMatrix.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/hdmf-experimental.experimental.py b/nwb_linkml/models/hdmf-experimental.experimental.py new file mode 100644 index 0000000..87172e3 --- /dev/null +++ b/nwb_linkml/models/hdmf-experimental.experimental.py @@ -0,0 +1,229 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + +class VectorDataArray(Arraylike): + + dim0: Any = Field(...) + dim1: Optional[Any] = Field(None) + dim2: Optional[Any] = Field(None) + dim3: Optional[Any] = Field(None) + + +class VectorIndexArray(Arraylike): + + num_rows: int = Field(...) + + +class ElementIdentifiersArray(Arraylike): + + num_elements: int = Field(...) + + +class DynamicTableRegionArray(Arraylike): + + num_rows: int = Field(...) + + +class DynamicTableIdArray(Arraylike): + + num_rows: int = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. 
If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""") + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + array: Optional[VectorDataArray] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""") + array: Optional[VectorIndexArray] = Field(None) + description: Optional[str] = Field(None, description="""Description of what these vectors represent.""") + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + array: Optional[ElementIdentifiersArray] = Field(None) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""") + description: Optional[str] = Field(None, description="""Description of what this table region points to.""") + array: Optional[DynamicTableRegionArray] = Field(None) + + +class DynamicTableId(ElementIdentifiers): + """ + Array of unique identifiers for the rows of this dynamic table. + """ + array: Optional[DynamicTableIdArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. 
+ """ + None + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""") + DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""") + colnames: Optional[str] = Field(None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""") + description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""") + id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""") + VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +Arraylike.update_forward_refs() +VectorDataArray.update_forward_refs() +VectorIndexArray.update_forward_refs() +ElementIdentifiersArray.update_forward_refs() +DynamicTableRegionArray.update_forward_refs() +DynamicTableIdArray.update_forward_refs() +Data.update_forward_refs() +VectorData.update_forward_refs() +EnumData.update_forward_refs() +VectorIndex.update_forward_refs() +ElementIdentifiers.update_forward_refs() +DynamicTableRegion.update_forward_refs() +DynamicTableId.update_forward_refs() +Container.update_forward_refs() +DynamicTable.update_forward_refs() +AlignedDynamicTable.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/hdmf-experimental.resources.py b/nwb_linkml/models/hdmf-experimental.resources.py new file mode 100644 index 0000000..75fce1b --- /dev/null +++ b/nwb_linkml/models/hdmf-experimental.resources.py @@ -0,0 +1,214 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. 
+ """ + None + + +class HERDKeysArray(Arraylike): + + num_rows: Any = Field(...) + + +class HERDFilesArray(Arraylike): + + num_rows: Any = Field(...) + + +class HERDEntitiesArray(Arraylike): + + num_rows: Any = Field(...) + + +class HERDObjectsArray(Arraylike): + + num_rows: Any = Field(...) + + +class HERDObjectKeysArray(Arraylike): + + num_rows: Any = Field(...) + + +class HERDEntityKeysArray(Arraylike): + + num_rows: Any = Field(...) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + None + + +class HERDKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + array: Optional[HERDKeysArray] = Field(None) + + +class HERDFiles(Data): + """ + A table for storing object ids of files used in external resources. + """ + array: Optional[HERDFilesArray] = Field(None) + + +class HERDEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + array: Optional[HERDEntitiesArray] = Field(None) + + +class HERDObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + array: Optional[HERDObjectsArray] = Field(None) + + +class HERDObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + array: Optional[HERDObjectKeysArray] = Field(None) + + +class HERDEntityKeys(Data): + """ + A table for identifying which keys use which entity. + """ + array: Optional[HERDEntityKeysArray] = Field(None) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + None + + +class HERD(Container): + """ + HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files. + """ + keys: HERDKeys = Field(, description="""A table for storing user terms that are used to refer to external resources.""") + files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""") + entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""") + objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""") + object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""") + entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""") + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. 
+ """ + Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""") + Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""") + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +Arraylike.update_forward_refs() +HERDKeysArray.update_forward_refs() +HERDFilesArray.update_forward_refs() +HERDEntitiesArray.update_forward_refs() +HERDObjectsArray.update_forward_refs() +HERDObjectKeysArray.update_forward_refs() +HERDEntityKeysArray.update_forward_refs() +Data.update_forward_refs() +HERDKeys.update_forward_refs() +HERDFiles.update_forward_refs() +HERDEntities.update_forward_refs() +HERDObjects.update_forward_refs() +HERDObjectKeys.update_forward_refs() +HERDEntityKeys.update_forward_refs() +Container.update_forward_refs() +HERD.update_forward_refs() +SimpleMultiContainer.update_forward_refs() diff --git a/nwb_linkml/models/nwb.language.py b/nwb_linkml/models/nwb.language.py new file mode 100644 index 0000000..5da9a24 --- /dev/null +++ b/nwb_linkml/models/nwb.language.py @@ -0,0 +1,92 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import List, Dict, Optional, Any, Union +from pydantic import BaseModel as BaseModel, Field +import sys +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + + +metamodel_version = "None" +version = "None" + +class WeakRefShimBaseModel(BaseModel): + __slots__ = '__weakref__' + +class ConfiguredBaseModel(WeakRefShimBaseModel, + validate_assignment = True, + validate_all = True, + underscore_attrs_are_private = True, + extra = 'forbid', + arbitrary_types_allowed = True, + use_enum_values = True): + pass + + +class FlatDType(str, Enum): + + + float = "float" + + float32 = "float32" + + double = "double" + + float64 = "float64" + + long = "long" + + int64 = "int64" + + int = "int" + + int32 = "int32" + + int16 = "int16" + + short = "short" + + int8 = "int8" + + uint = "uint" + + uint32 = "uint32" + + uint16 = "uint16" + + uint8 = "uint8" + + uint64 = "uint64" + + numeric = "numeric" + + text = "text" + + utf = "utf" + + utf8 = "utf8" + + utf_8 = "utf_8" + + ascii = "ascii" + + bool = "bool" + + isodatetime = "isodatetime" + + + +class Arraylike(ConfiguredBaseModel): + """ + Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. + """ + None + + + +# Update forward refs +# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/ +Arraylike.update_forward_refs() diff --git a/nwb_linkml/namespaces.py b/nwb_linkml/namespaces.py index f6578c8..eed6226 100644 --- a/nwb_linkml/namespaces.py +++ b/nwb_linkml/namespaces.py @@ -154,8 +154,14 @@ class GitRepo: self.cleanup() else: if not self.check(): - warnings.warn('Destination directory is not empty and does not pass checks for correctness! pass force to overwrite') - return + warnings.warn('Destination directory is not empty and does not pass checks for correctness! 
cleaning up') + self.cleanup() + else: + # already have it + return + elif self.temp_directory.exists(): + # exists but empty + self.cleanup() res = subprocess.run(['git', 'clone', str(self.namespace.repository), str(self.temp_directory)]) if res.returncode != 0: diff --git a/nwb_linkml/plot.py b/nwb_linkml/plot.py new file mode 100644 index 0000000..cd451d2 --- /dev/null +++ b/nwb_linkml/plot.py @@ -0,0 +1,170 @@ +""" +Various visualization routines, mostly to help development for now +""" +from typing import TYPE_CHECKING, Optional, List, TypedDict, Union +from rich import print +import random + +from dash import Dash, html +import dash_cytoscape as cyto +cyto.load_extra_layouts() + +from nwb_schema_language import Namespace, Group, Dataset +from nwb_linkml.io import load_nwb_core + +if TYPE_CHECKING: + from nwb_linkml.adapters import NamespacesAdapter + # from nwb_schema_language.datamodel import Namespaces + +class _CytoNode(TypedDict): + id: str + label: str + +class _CytoEdge(TypedDict): + source: str + target: str + +class CytoElement(TypedDict): + data: _CytoEdge | _CytoNode + classes: Optional[str] + +class Node: + def __init__(self, + id: str, + label: str, + klass: str, + parent: Optional[str] = None): + self.id = id + self.label = label + self.parent = parent + self.klass = klass + + def make(self) -> List[CytoElement]: + + node = [ + CytoElement(data= _CytoNode(id=self.id, label=self.label), classes=self.klass) + ] + if self.parent: + edge = [ + CytoElement(data=_CytoEdge(source=self.parent, target=self.id)) + ] + node += edge + + return node + +def make_node(element: Group | Dataset, parent=None, recurse:bool=True) -> List[Node]: + if element.neurodata_type_def is None: + if element.name is None: + if element.neurodata_type_inc is None: + name = 'anonymous' + else: + name = element.neurodata_type_inc + else: + name = element.name + id = name + '-' + str(random.randint(0,1000)) + label = id + classname = str(type(element).__name__).lower() + '-child' + else: + id = element.neurodata_type_def + label = element.neurodata_type_def + classname = str(type(element).__name__).lower() + + if parent is None: + parent = element.neurodata_type_inc + + + node = Node( + id=id, + label=label, + parent=parent, + klass=classname + ) + nodes = [node] + + if isinstance(element, Group) and recurse: + for group in element.groups: + nodes += make_node(group, parent=id) + for dataset in element.datasets: + nodes += make_node(dataset, parent=id) + return nodes + + +def make_graph(namespaces: 'NamespacesAdapter', recurse:bool=True) -> List[CytoElement]: + namespaces.populate_imports() + nodes = [] + element: Namespace | Group | Dataset + print('walking graph') + i = 0 + for element in namespaces.walk_types(namespaces, (Group, Dataset)): + if element.neurodata_type_def is None: + # skip child nodes at top level, we'll get them in recursion + continue + if any([element.neurodata_type_def == node.id for node in nodes]): + continue + nodes.extend(make_node(element, recurse=recurse)) + + print('making elements') + cytoelements = [] + for node in nodes: + cytoelements += node.make() + print(cytoelements) + return cytoelements + +def plot_dependency_graph(namespaces: 'NamespacesAdapter', recurse:bool=True) -> Dash: + graph = make_graph(namespaces, recurse=recurse) + + app = Dash(__name__) + + styles = [ + { + 'selector': 'node', + 'style': { + 'content': 'data(label)' + } + }, + { + 'selector': '.dataset', + 'style': { + 'background-color': 'red', + 'shape': 'rectangle' + } + }, + { + 'selector': '.group', 
+ 'style': { + 'background-color': 'blue', + 'shape': 'rectangle' + } + }, + { + 'selector': '.dataset-child', + 'style': { + 'background-color': 'red' + } + }, + { + 'selector': '.group-child', + 'style': { + 'background-color': 'blue' + } + } + ] + + app.layout = html.Div([ + cyto.Cytoscape( + id='nwb_graph', + elements = graph, + style={'width': '100%', 'height': '100vh'}, + layout= {'name': 'klay', 'rankDir': 'LR'}, + stylesheet=styles + ) + ]) + return app + +if __name__ == "__main__": + core = load_nwb_core() + app = plot_dependency_graph(core, recurse=True) + print('opening dash') + app.run(debug=True) + + diff --git a/nwb_linkml/schema/core.nwb.base.yaml b/nwb_linkml/schema/core.nwb.base.yaml new file mode 100644 index 0000000..4f401c8 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.base.yaml @@ -0,0 +1,359 @@ +name: core.nwb.base +id: core.nwb.base +imports: +- hdmf-common.base +- hdmf-common.table +- nwb.language +default_prefix: core.nwb.base/ +classes: + NWBData: + name: NWBData + description: An abstract data type for a dataset. + is_a: Data + TimeSeriesReferenceVectorData: + name: TimeSeriesReferenceVectorData + description: Column storing references to a TimeSeries (rows). For each TimeSeries + this VectorData column stores the start_index and count to indicate the range + in time to be selected as well as an object reference to the TimeSeries. + is_a: VectorData + Image: + name: Image + description: An abstract data type for an image. Shape can be 2-D (x, y), or 3-D + where the third dimension can have three or four elements, e.g. (x, y, (r, g, + b)) or (x, y, (r, g, b, a)). + is_a: NWBData + attributes: + resolution: + name: resolution + description: Pixel resolution of the image, in pixels per centimeter. + range: float32 + description: + name: description + description: Description of the image. + range: text + array: + name: array + range: Image_Array + Image_Array: + name: Image_Array + is_a: Arraylike + attributes: + x: + name: x + range: numeric + required: true + y: + name: y + range: numeric + required: true + r, g, b: + name: r, g, b + range: numeric + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + r, g, b, a: + name: r, g, b, a + range: numeric + required: false + minimum_cardinality: 4 + maximum_cardinality: 4 + ImageReferences: + name: ImageReferences + description: Ordered dataset of references to Image objects. + is_a: NWBData + attributes: + array: + name: array + range: ImageReferences_Array + ImageReferences_Array: + name: ImageReferences_Array + is_a: Arraylike + attributes: + num_images: + name: num_images + range: Image + required: true + NWBContainer: + name: NWBContainer + description: An abstract data type for a generic container storing collections + of data and metadata. Base type for all data and metadata containers. + is_a: Container + NWBDataInterface: + name: NWBDataInterface + description: An abstract data type for a generic container storing collections + of data, as opposed to metadata. + is_a: NWBContainer + TimeSeries: + name: TimeSeries + description: General purpose time series. + is_a: NWBDataInterface + attributes: + description: + name: description + description: Description of the time series. + range: text + comments: + name: comments + description: Human-readable comments about the TimeSeries. This second descriptive + field can be used to store additional information, or descriptive information + if the primary description field is populated with a computer-readable string. 
+ range: text + data: + name: data + description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first + dimension should always represent time. This can also be used to store binary + data (e.g., image frames). This can also be a link to data stored in an + external file. + multivalued: false + range: TimeSeries_data + required: true + starting_time: + name: starting_time + description: Timestamp of the first sample in seconds. When timestamps are + uniformly spaced, the timestamp of the first sample can be specified and + all subsequent ones calculated from the sampling rate attribute. + multivalued: false + range: TimeSeries_starting_time + required: false + timestamps: + name: timestamps + description: Timestamps for samples stored in data, in seconds, relative to + the common experiment master-clock stored in NWBFile.timestamps_reference_time. + multivalued: false + range: TimeSeries_timestamps + required: false + control: + name: control + description: Numerical labels that apply to each time point in data for the + purpose of querying and slicing data by these values. If present, the length + of this array should be the same size as the first dimension of data. + multivalued: false + range: TimeSeries_control + required: false + control_description: + name: control_description + description: Description of each control value. Must be present if control + is present. If present, control_description[0] should describe time points + where control == 0. + multivalued: false + range: TimeSeries_control_description + required: false + sync: + name: sync + description: Lab-specific time and sync information as provided directly from + hardware devices and that is necessary for aligning all acquired time information + to a common timebase. The timestamp array stores time in the common timebase. + This group will usually only be populated in TimeSeries that are stored + external to the NWB file, in files storing raw data. Once timestamp data + is calculated, the contents of 'sync' are mostly for archival purposes. + multivalued: false + range: TimeSeries_sync + required: false + TimeSeries_data: + name: TimeSeries_data + description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension + should always represent time. This can also be used to store binary data (e.g., + image frames). This can also be a link to data stored in an external file. + attributes: + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + range: float32 + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. 
+ range: float32 + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + array: + name: array + range: TimeSeries_data_Array + TimeSeries_data_Array: + name: TimeSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: AnyType + required: true + num_DIM2: + name: num_DIM2 + range: AnyType + required: false + num_DIM3: + name: num_DIM3 + range: AnyType + required: false + num_DIM4: + name: num_DIM4 + range: AnyType + required: false + TimeSeries_starting_time: + name: TimeSeries_starting_time + description: Timestamp of the first sample in seconds. When timestamps are uniformly + spaced, the timestamp of the first sample can be specified and all subsequent + ones calculated from the sampling rate attribute. + attributes: + rate: + name: rate + description: Sampling rate, in Hz. + range: float32 + unit: + name: unit + description: Unit of measurement for time, which is fixed to 'seconds'. + range: text + TimeSeries_timestamps: + name: TimeSeries_timestamps + description: Timestamps for samples stored in data, in seconds, relative to the + common experiment master-clock stored in NWBFile.timestamps_reference_time. + attributes: + interval: + name: interval + description: Value is '1' + range: int32 + unit: + name: unit + description: Unit of measurement for timestamps, which is fixed to 'seconds'. + range: text + array: + name: array + range: TimeSeries_timestamps_Array + TimeSeries_timestamps_Array: + name: TimeSeries_timestamps_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: float64 + required: true + TimeSeries_control: + name: TimeSeries_control + description: Numerical labels that apply to each time point in data for the purpose + of querying and slicing data by these values. If present, the length of this + array should be the same size as the first dimension of data. + attributes: + array: + name: array + range: TimeSeries_control_Array + TimeSeries_control_Array: + name: TimeSeries_control_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: uint8 + required: true + TimeSeries_control_description: + name: TimeSeries_control_description + description: Description of each control value. Must be present if control is + present. If present, control_description[0] should describe time points where + control == 0. 
+ attributes: + array: + name: array + range: TimeSeries_control_description_Array + TimeSeries_control_description_Array: + name: TimeSeries_control_description_Array + is_a: Arraylike + attributes: + num_control_values: + name: num_control_values + range: text + required: true + TimeSeries_sync: + name: TimeSeries_sync + description: Lab-specific time and sync information as provided directly from + hardware devices and that is necessary for aligning all acquired time information + to a common timebase. The timestamp array stores time in the common timebase. + This group will usually only be populated in TimeSeries that are stored external + to the NWB file, in files storing raw data. Once timestamp data is calculated, + the contents of 'sync' are mostly for archival purposes. + ProcessingModule: + name: ProcessingModule + description: A collection of processed data. + is_a: NWBContainer + attributes: + description: + name: description + description: Description of this collection of processed data. + range: text + NWBDataInterface: + name: NWBDataInterface + description: Data objects stored in this collection. + multivalued: true + range: NWBDataInterface + required: false + DynamicTable: + name: DynamicTable + description: Tables stored in this collection. + multivalued: true + range: DynamicTable + required: false + Images: + name: Images + description: A collection of images with an optional way to specify the order + of the images using the "order_of_images" dataset. An order must be specified + if the images are referenced by index, e.g., from an IndexSeries. + is_a: NWBDataInterface + attributes: + description: + name: description + description: Description of this collection of images. + range: text + Image: + name: Image + description: Images stored in this collection. + multivalued: true + range: Image + required: true + order_of_images: + name: order_of_images + description: Ordered dataset of references to Image objects stored in the + parent group. Each Image object in the Images group should be stored once + and only once, so the dataset should have the same length as the number + of images. + multivalued: false + range: Images_order_of_images + required: false + Images_order_of_images: + name: Images_order_of_images + description: Ordered dataset of references to Image objects stored in the parent + group. Each Image object in the Images group should be stored once and only + once, so the dataset should have the same length as the number of images. + is_a: ImageReferences diff --git a/nwb_linkml/schema/core.nwb.behavior.yaml b/nwb_linkml/schema/core.nwb.behavior.yaml new file mode 100644 index 0000000..6085844 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.behavior.yaml @@ -0,0 +1,172 @@ +name: core.nwb.behavior +id: core.nwb.behavior +imports: +- core.nwb.base +- core.nwb.misc +- nwb.language +default_prefix: core.nwb.behavior/ +classes: + SpatialSeries: + name: SpatialSeries + description: 'Direction, e.g., of gaze or travel, or position. The TimeSeries::data + field is a 2D array storing position or direction relative to some reference + frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries + has a text dataset reference_frame that indicates the zero-position, or the + zero-axes for direction. For example, if representing gaze direction, ''straight-ahead'' + might be a specific pixel on the monitor, or some other point in space. 
For + position data, the 0,0 point might be the top-left corner of an enclosure, as + viewed from the tracking camera. The unit of data will indicate how to interpret + SpatialSeries values.' + is_a: TimeSeries + attributes: + data: + name: data + description: 1-D or 2-D array storing position or direction relative to some + reference frame. + multivalued: false + range: SpatialSeries_data + required: true + reference_frame: + name: reference_frame + description: Description defining what exactly 'straight-ahead' means. + multivalued: false + range: SpatialSeries_reference_frame + required: false + SpatialSeries_data: + name: SpatialSeries_data + description: 1-D or 2-D array storing position or direction relative to some reference + frame. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + array: + name: array + range: SpatialSeries_data_Array + SpatialSeries_data_Array: + name: SpatialSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + x: + name: x + range: numeric + required: false + minimum_cardinality: 1 + maximum_cardinality: 1 + x,y: + name: x,y + range: numeric + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + x,y,z: + name: x,y,z + range: numeric + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + SpatialSeries_reference_frame: + name: SpatialSeries_reference_frame + description: Description defining what exactly 'straight-ahead' means. + BehavioralEpochs: + name: BehavioralEpochs + description: TimeSeries for storing behavioral epochs. The objective of this + and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) + is to provide generic hooks for software tools/scripts. This allows a tool/script + to take the output one specific interface (e.g., UnitTimes) and plot that data + relative to another data modality (e.g., behavioral events) without having to + define all possible modalities in advance. Declaring one of these interfaces + means that one or more TimeSeries of the specified type is published. These + TimeSeries should reside in a group having the same name as the interface. For + example, if a BehavioralTimeSeries interface is declared, the module will have + one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. + BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular + events. BehavioralTimeSeries is for continuous data. + is_a: NWBDataInterface + attributes: + IntervalSeries: + name: IntervalSeries + description: IntervalSeries object containing start and stop times of epochs. + multivalued: true + range: IntervalSeries + required: false + BehavioralEvents: + name: BehavioralEvents + description: TimeSeries for storing behavioral events. See description of BehavioralEpochs + for more details. + is_a: NWBDataInterface + attributes: + TimeSeries: + name: TimeSeries + description: TimeSeries object containing behavioral events. + multivalued: true + range: TimeSeries + required: false + BehavioralTimeSeries: + name: BehavioralTimeSeries + description: TimeSeries for storing Behavoioral time series data. See description + of BehavioralEpochs for more details. 
+ is_a: NWBDataInterface + attributes: + TimeSeries: + name: TimeSeries + description: TimeSeries object containing continuous behavioral data. + multivalued: true + range: TimeSeries + required: false + PupilTracking: + name: PupilTracking + description: Eye-tracking data, representing pupil size. + is_a: NWBDataInterface + attributes: + TimeSeries: + name: TimeSeries + description: TimeSeries object containing time series data on pupil size. + multivalued: true + range: TimeSeries + required: true + EyeTracking: + name: EyeTracking + description: Eye-tracking data, representing direction of gaze. + is_a: NWBDataInterface + attributes: + SpatialSeries: + name: SpatialSeries + description: SpatialSeries object containing data measuring direction of gaze. + multivalued: true + range: SpatialSeries + required: false + CompassDirection: + name: CompassDirection + description: With a CompassDirection interface, a module publishes a SpatialSeries + object representing a floating point value for theta. The SpatialSeries::reference_frame + field should indicate what direction corresponds to 0 and which is the direction + of rotation (this should be clockwise). The si_unit for the SpatialSeries should + be radians or degrees. + is_a: NWBDataInterface + attributes: + SpatialSeries: + name: SpatialSeries + description: SpatialSeries object containing direction of gaze travel. + multivalued: true + range: SpatialSeries + required: false + Position: + name: Position + description: Position data, whether along the x, x/y or x/y/z axis. + is_a: NWBDataInterface + attributes: + SpatialSeries: + name: SpatialSeries + description: SpatialSeries object containing position data. + multivalued: true + range: SpatialSeries + required: true diff --git a/nwb_linkml/schema/core.nwb.device.yaml b/nwb_linkml/schema/core.nwb.device.yaml new file mode 100644 index 0000000..211924a --- /dev/null +++ b/nwb_linkml/schema/core.nwb.device.yaml @@ -0,0 +1,22 @@ +name: core.nwb.device +id: core.nwb.device +imports: +- core.nwb.base +- nwb.language +default_prefix: core.nwb.device/ +classes: + Device: + name: Device + description: Metadata about a data acquisition device, e.g., recording system, + electrode, microscope. + is_a: NWBContainer + attributes: + description: + name: description + description: Description of the device (e.g., model, firmware version, processing + software version, etc.) as free-form text. + range: text + manufacturer: + name: manufacturer + description: The name of the manufacturer of the device. + range: text diff --git a/nwb_linkml/schema/core.nwb.ecephys.yaml b/nwb_linkml/schema/core.nwb.ecephys.yaml new file mode 100644 index 0000000..0ee3375 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.ecephys.yaml @@ -0,0 +1,588 @@ +name: core.nwb.ecephys +id: core.nwb.ecephys +imports: +- core.nwb.base +- hdmf-common.table +- nwb.language +default_prefix: core.nwb.ecephys/ +classes: + ElectricalSeries: + name: ElectricalSeries + description: A time series of acquired voltage data from extracellular recordings. + The data field is an int or float array storing data in volts. The first dimension + should always represent time. The second dimension, if present, should represent + channels. + is_a: TimeSeries + attributes: + filtering: + name: filtering + description: Filtering applied to all channels of the data. For example, if + this ElectricalSeries represents high-pass-filtered data (also known as + AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 + Hz". 
If this ElectricalSeries represents low-pass-filtered LFP data and + the type of filter is unknown, then this value could be "Low-pass filter + at 300 Hz". If a non-standard filter type is used, provide as much detail + about the filter properties as possible. + range: text + data: + name: data + description: Recorded voltage data. + multivalued: false + range: ElectricalSeries_data + required: true + electrodes: + name: electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + multivalued: false + range: ElectricalSeries_electrodes + required: true + channel_conversion: + name: channel_conversion + description: Channel-specific conversion factor. Multiply the data in the + 'data' dataset by these values along the channel axis (as indicated by axis + attribute) AND by the global conversion factor in the 'conversion' attribute + of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion + * channel_conversion. This approach allows for both global and per-channel + data conversion factors needed to support the storage of electrical recordings + as native values generated by data acquisition systems. If this dataset + is not present, then there is no channel-specific conversion factor, i.e. + it is 1 for all channels. + multivalued: false + range: ElectricalSeries_channel_conversion + required: false + ElectricalSeries_data: + name: ElectricalSeries_data + description: Recorded voltage data. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. To access the data in these units, multiply 'data' by 'conversion', + followed by 'channel_conversion' (if present), and then add 'offset'. + range: text + array: + name: array + range: ElectricalSeries_data_Array + ElectricalSeries_data_Array: + name: ElectricalSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + num_channels: + name: num_channels + range: numeric + required: false + num_samples: + name: num_samples + range: numeric + required: false + ElectricalSeries_electrodes: + name: ElectricalSeries_electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + is_a: DynamicTableRegion + ElectricalSeries_channel_conversion: + name: ElectricalSeries_channel_conversion + description: Channel-specific conversion factor. Multiply the data in the 'data' + dataset by these values along the channel axis (as indicated by axis attribute) + AND by the global conversion factor in the 'conversion' attribute of 'data' + to get the data values in Volts, i.e, data in Volts = data * data.conversion + * channel_conversion. This approach allows for both global and per-channel data + conversion factors needed to support the storage of electrical recordings as + native values generated by data acquisition systems. If this dataset is not + present, then there is no channel-specific conversion factor, i.e. it is 1 for + all channels. + attributes: + axis: + name: axis + description: The zero-indexed axis of the 'data' dataset that the channel-specific + conversion factor corresponds to. This value is fixed to 1. 
+ range: int32 + array: + name: array + range: ElectricalSeries_channel_conversion_Array + ElectricalSeries_channel_conversion_Array: + name: ElectricalSeries_channel_conversion_Array + is_a: Arraylike + attributes: + num_channels: + name: num_channels + range: float32 + required: true + SpikeEventSeries: + name: SpikeEventSeries + description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold + crossings). This may also be raw data, as reported by ephys hardware. If so, + the TimeSeries::description field should describe how events were detected. + All SpikeEventSeries should reside in a module (under EventWaveform interface) + even if the spikes were reported and stored by hardware. All events span the + same recording channels and store snapshots of equal duration. TimeSeries::data + array structure: [num events] [num channels] [num samples] (or [num events] + [num samples] for single electrode).' + is_a: ElectricalSeries + attributes: + data: + name: data + description: Spike waveforms. + multivalued: false + range: SpikeEventSeries_data + required: true + timestamps: + name: timestamps + description: Timestamps for samples stored in data, in seconds, relative to + the common experiment master-clock stored in NWBFile.timestamps_reference_time. + Timestamps are required for the events. Unlike for TimeSeries, timestamps + are required for SpikeEventSeries and are thus re-specified here. + multivalued: false + range: SpikeEventSeries_timestamps + required: true + SpikeEventSeries_data: + name: SpikeEventSeries_data + description: Spike waveforms. + attributes: + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. + range: text + array: + name: array + range: SpikeEventSeries_data_Array + SpikeEventSeries_data_Array: + name: SpikeEventSeries_data_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: numeric + required: true + num_samples: + name: num_samples + range: numeric + required: true + num_channels: + name: num_channels + range: numeric + required: false + SpikeEventSeries_timestamps: + name: SpikeEventSeries_timestamps + description: Timestamps for samples stored in data, in seconds, relative to the + common experiment master-clock stored in NWBFile.timestamps_reference_time. + Timestamps are required for the events. Unlike for TimeSeries, timestamps are + required for SpikeEventSeries and are thus re-specified here. + attributes: + interval: + name: interval + description: Value is '1' + range: int32 + unit: + name: unit + description: Unit of measurement for timestamps, which is fixed to 'seconds'. + range: text + array: + name: array + range: SpikeEventSeries_timestamps_Array + SpikeEventSeries_timestamps_Array: + name: SpikeEventSeries_timestamps_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: float64 + required: true + FeatureExtraction: + name: FeatureExtraction + description: Features, such as PC1 and PC2, that are extracted from signals stored + in a SpikeEventSeries or other source. + is_a: NWBDataInterface + attributes: + description: + name: description + description: Description of features (eg, ''PC1'') for each of the extracted + features. + multivalued: false + range: FeatureExtraction_description + required: true + features: + name: features + description: Multi-dimensional array of features extracted from each event. 
+ multivalued: false + range: FeatureExtraction_features + required: true + times: + name: times + description: Times of events that features correspond to (can be a link). + multivalued: false + range: FeatureExtraction_times + required: true + electrodes: + name: electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + multivalued: false + range: FeatureExtraction_electrodes + required: true + FeatureExtraction_description: + name: FeatureExtraction_description + description: Description of features (eg, ''PC1'') for each of the extracted features. + attributes: + array: + name: array + range: FeatureExtraction_description_Array + FeatureExtraction_description_Array: + name: FeatureExtraction_description_Array + is_a: Arraylike + attributes: + num_features: + name: num_features + range: text + required: true + FeatureExtraction_features: + name: FeatureExtraction_features + description: Multi-dimensional array of features extracted from each event. + attributes: + array: + name: array + range: FeatureExtraction_features_Array + FeatureExtraction_features_Array: + name: FeatureExtraction_features_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: float32 + required: false + num_channels: + name: num_channels + range: float32 + required: false + num_features: + name: num_features + range: float32 + required: false + FeatureExtraction_times: + name: FeatureExtraction_times + description: Times of events that features correspond to (can be a link). + attributes: + array: + name: array + range: FeatureExtraction_times_Array + FeatureExtraction_times_Array: + name: FeatureExtraction_times_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: float64 + required: true + FeatureExtraction_electrodes: + name: FeatureExtraction_electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + is_a: DynamicTableRegion + EventDetection: + name: EventDetection + description: Detected spike events from voltage trace(s). + is_a: NWBDataInterface + attributes: + detection_method: + name: detection_method + description: Description of how events were detected, such as voltage threshold, + or dV/dT threshold, as well as relevant values. + multivalued: false + range: EventDetection_detection_method + required: true + source_idx: + name: source_idx + description: Indices (zero-based) into source ElectricalSeries::data array + corresponding to time of event. ''description'' should define what is meant + by time of event (e.g., .25 ms before action potential peak, zero-crossing + time, etc). The index points to each event from the raw data. + multivalued: false + range: EventDetection_source_idx + required: true + times: + name: times + description: Timestamps of events, in seconds. + multivalued: false + range: EventDetection_times + required: true + EventDetection_detection_method: + name: EventDetection_detection_method + description: Description of how events were detected, such as voltage threshold, + or dV/dT threshold, as well as relevant values. + EventDetection_source_idx: + name: EventDetection_source_idx + description: Indices (zero-based) into source ElectricalSeries::data array corresponding + to time of event. ''description'' should define what is meant by time of event + (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index + points to each event from the raw data. 
+ attributes: + array: + name: array + range: EventDetection_source_idx_Array + EventDetection_source_idx_Array: + name: EventDetection_source_idx_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: int32 + required: true + EventDetection_times: + name: EventDetection_times + description: Timestamps of events, in seconds. + attributes: + unit: + name: unit + description: Unit of measurement for event times, which is fixed to 'seconds'. + range: text + array: + name: array + range: EventDetection_times_Array + EventDetection_times_Array: + name: EventDetection_times_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: float64 + required: true + EventWaveform: + name: EventWaveform + description: Represents either the waveforms of detected events, as extracted + from a raw data trace in /acquisition, or the event waveforms that were stored + during experiment acquisition. + is_a: NWBDataInterface + attributes: + SpikeEventSeries: + name: SpikeEventSeries + description: SpikeEventSeries object(s) containing detected spike event waveforms. + multivalued: true + range: SpikeEventSeries + required: false + FilteredEphys: + name: FilteredEphys + description: Electrophysiology data from one or more channels that has been subjected + to filtering. Examples of filtered data include Theta and Gamma (LFP has its + own interface). FilteredEphys modules publish an ElectricalSeries for each filtered + channel or set of channels. The name of each ElectricalSeries is arbitrary but + should be informative. The source of the filtered data, whether this is from + analysis of another time series or as acquired by hardware, should be noted + in each's TimeSeries::description field. There is no assumed 1::1 correspondence + between filtered ephys signals and electrodes, as a single signal can apply + to many nearby electrodes, and one electrode may have different filtered (e.g., + theta and/or gamma) signals represented. Filter properties should be noted in + the ElectricalSeries 'filtering' attribute. + is_a: NWBDataInterface + attributes: + ElectricalSeries: + name: ElectricalSeries + description: ElectricalSeries object(s) containing filtered electrophysiology + data. + multivalued: true + range: ElectricalSeries + required: true + LFP: + name: LFP + description: LFP data from one or more channels. The electrode map in each published + ElectricalSeries will identify which channels are providing LFP data. Filter + properties should be noted in the ElectricalSeries 'filtering' attribute. + is_a: NWBDataInterface + attributes: + ElectricalSeries: + name: ElectricalSeries + description: ElectricalSeries object(s) containing LFP data for one or more + channels. + multivalued: true + range: ElectricalSeries + required: true + ElectrodeGroup: + name: ElectrodeGroup + description: A physical grouping of electrodes, e.g. a shank of an array. + is_a: NWBContainer + attributes: + description: + name: description + description: Description of this electrode group. + range: text + location: + name: location + description: Location of electrode group. Specify the area, layer, comments + on estimation of area/layer, etc. Use standard atlas names for anatomical + regions when possible. 
+ range: text + position: + name: position + description: stereotaxic or common framework coordinates + multivalued: false + range: ElectrodeGroup_position + required: false + ElectrodeGroup_position: + name: ElectrodeGroup_position + description: stereotaxic or common framework coordinates + ClusterWaveforms: + name: ClusterWaveforms + description: DEPRECATED The mean waveform shape, including standard deviation, + of the different clusters. Ideally, the waveform analysis should be performed + on data that is only high-pass filtered. This is a separate module because it + is expected to require updating. For example, IMEC probes may require different + storage requirements to store/display mean waveforms, requiring a new interface + or an extension of this one. + is_a: NWBDataInterface + attributes: + waveform_filtering: + name: waveform_filtering + description: Filtering applied to data before generating mean/sd + multivalued: false + range: ClusterWaveforms_waveform_filtering + required: true + waveform_mean: + name: waveform_mean + description: The mean waveform for each cluster, using the same indices for + each wave as cluster numbers in the associated Clustering module (i.e, cluster + 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence + should be empty (e.g., zero- filled) + multivalued: false + range: ClusterWaveforms_waveform_mean + required: true + waveform_sd: + name: waveform_sd + description: Stdev of waveforms for each cluster, using the same indices as + in mean + multivalued: false + range: ClusterWaveforms_waveform_sd + required: true + ClusterWaveforms_waveform_filtering: + name: ClusterWaveforms_waveform_filtering + description: Filtering applied to data before generating mean/sd + ClusterWaveforms_waveform_mean: + name: ClusterWaveforms_waveform_mean + description: The mean waveform for each cluster, using the same indices for each + wave as cluster numbers in the associated Clustering module (i.e, cluster 3 + is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should + be empty (e.g., zero- filled) + attributes: + array: + name: array + range: ClusterWaveforms_waveform_mean_Array + ClusterWaveforms_waveform_mean_Array: + name: ClusterWaveforms_waveform_mean_Array + is_a: Arraylike + attributes: + num_clusters: + name: num_clusters + range: float32 + required: false + num_samples: + name: num_samples + range: float32 + required: false + ClusterWaveforms_waveform_sd: + name: ClusterWaveforms_waveform_sd + description: Stdev of waveforms for each cluster, using the same indices as in + mean + attributes: + array: + name: array + range: ClusterWaveforms_waveform_sd_Array + ClusterWaveforms_waveform_sd_Array: + name: ClusterWaveforms_waveform_sd_Array + is_a: Arraylike + attributes: + num_clusters: + name: num_clusters + range: float32 + required: false + num_samples: + name: num_samples + range: float32 + required: false + Clustering: + name: Clustering + description: DEPRECATED Clustered spike data, whether from automatic clustering + tools (e.g., klustakwik) or as a result of manual sorting. + is_a: NWBDataInterface + attributes: + description: + name: description + description: Description of clusters or clustering, (e.g. 
cluster 0 is noise, + clusters curated using Klusters, etc) + multivalued: false + range: Clustering_description + required: true + num: + name: num + description: Cluster number of each event + multivalued: false + range: Clustering_num + required: true + peak_over_rms: + name: peak_over_rms + description: Maximum ratio of waveform peak to RMS on any channel in the cluster + (provides a basic clustering metric). + multivalued: false + range: Clustering_peak_over_rms + required: true + times: + name: times + description: Times of clustered events, in seconds. This may be a link to + times field in associated FeatureExtraction module. + multivalued: false + range: Clustering_times + required: true + Clustering_description: + name: Clustering_description + description: Description of clusters or clustering, (e.g. cluster 0 is noise, + clusters curated using Klusters, etc) + Clustering_num: + name: Clustering_num + description: Cluster number of each event + attributes: + array: + name: array + range: Clustering_num_Array + Clustering_num_Array: + name: Clustering_num_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: int32 + required: true + Clustering_peak_over_rms: + name: Clustering_peak_over_rms + description: Maximum ratio of waveform peak to RMS on any channel in the cluster + (provides a basic clustering metric). + attributes: + array: + name: array + range: Clustering_peak_over_rms_Array + Clustering_peak_over_rms_Array: + name: Clustering_peak_over_rms_Array + is_a: Arraylike + attributes: + num_clusters: + name: num_clusters + range: float32 + required: true + Clustering_times: + name: Clustering_times + description: Times of clustered events, in seconds. This may be a link to times + field in associated FeatureExtraction module. + attributes: + array: + name: array + range: Clustering_times_Array + Clustering_times_Array: + name: Clustering_times_Array + is_a: Arraylike + attributes: + num_events: + name: num_events + range: float64 + required: true diff --git a/nwb_linkml/schema/core.nwb.epoch.yaml b/nwb_linkml/schema/core.nwb.epoch.yaml new file mode 100644 index 0000000..9cc46e8 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.epoch.yaml @@ -0,0 +1,74 @@ +name: core.nwb.epoch +id: core.nwb.epoch +imports: +- hdmf-common.table +- core.nwb.base +- nwb.language +default_prefix: core.nwb.epoch/ +classes: + TimeIntervals: + name: TimeIntervals + description: A container for aggregating epoch data and the TimeSeries that each + epoch applies to. + is_a: DynamicTable + attributes: + start_time: + name: start_time + description: Start time of epoch, in seconds. + multivalued: false + range: TimeIntervals_start_time + required: true + stop_time: + name: stop_time + description: Stop time of epoch, in seconds. + multivalued: false + range: TimeIntervals_stop_time + required: true + tags: + name: tags + description: User-defined tags that identify or categorize events. + multivalued: false + range: TimeIntervals_tags + required: false + tags_index: + name: tags_index + description: Index for tags. + multivalued: false + range: TimeIntervals_tags_index + required: false + timeseries: + name: timeseries + description: An index into a TimeSeries object. + multivalued: false + range: TimeIntervals_timeseries + required: false + timeseries_index: + name: timeseries_index + description: Index for timeseries. 
+ multivalued: false + range: TimeIntervals_timeseries_index + required: false + TimeIntervals_start_time: + name: TimeIntervals_start_time + description: Start time of epoch, in seconds. + is_a: VectorData + TimeIntervals_stop_time: + name: TimeIntervals_stop_time + description: Stop time of epoch, in seconds. + is_a: VectorData + TimeIntervals_tags: + name: TimeIntervals_tags + description: User-defined tags that identify or categorize events. + is_a: VectorData + TimeIntervals_tags_index: + name: TimeIntervals_tags_index + description: Index for tags. + is_a: VectorIndex + TimeIntervals_timeseries: + name: TimeIntervals_timeseries + description: An index into a TimeSeries object. + is_a: TimeSeriesReferenceVectorData + TimeIntervals_timeseries_index: + name: TimeIntervals_timeseries_index + description: Index for timeseries. + is_a: VectorIndex diff --git a/nwb_linkml/schema/core.nwb.file.yaml b/nwb_linkml/schema/core.nwb.file.yaml new file mode 100644 index 0000000..2bb38bd --- /dev/null +++ b/nwb_linkml/schema/core.nwb.file.yaml @@ -0,0 +1,1099 @@ +name: core.nwb.file +id: core.nwb.file +imports: +- core.nwb.base +- hdmf-common.table +- core.nwb.device +- core.nwb.ecephys +- core.nwb.icephys +- core.nwb.ogen +- core.nwb.ophys +- core.nwb.epoch +- core.nwb.misc +- nwb.language +default_prefix: core.nwb.file/ +classes: + ScratchData: + name: ScratchData + description: Any one-off datasets + is_a: NWBData + attributes: + notes: + name: notes + description: Any notes the user has about the dataset being stored + range: text + NWBFile: + name: NWBFile + description: An NWB file storing cellular-based neurophysiology data from a single + experimental session. + is_a: NWBContainer + attributes: + nwb_version: + name: nwb_version + description: File version string. Use semantic versioning, e.g. 1.2.1. This + will be the name of the format with trailing major, minor and patch numbers. + range: text + file_create_date: + name: file_create_date + description: 'A record of the date the file was created and of subsequent + modifications. The date is stored in UTC with local timezone offset as ISO + 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored + in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. + The file can be created after the experiment was run, so this may differ + from the experiment start time. Each modification to the nwb file adds a + new entry to the array.' + multivalued: false + range: NWBFile_file_create_date + required: true + identifier: + name: identifier + description: A unique text identifier for the file. For example, concatenated + lab name, file creation date/time and experimentalist, or a hash of these + and/or other values. The goal is that the string should be unique to all + other files. + multivalued: false + range: NWBFile_identifier + required: true + session_description: + name: session_description + description: A description of the experimental session and data in the file. + multivalued: false + range: NWBFile_session_description + required: true + session_start_time: + name: session_start_time + description: 'Date and time of the experiment/session start. The date is stored + in UTC with local timezone offset as ISO 8601 extended formatted string: + 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone + offset. Date accuracy is up to milliseconds.' 
+ multivalued: false + range: NWBFile_session_start_time + required: true + timestamps_reference_time: + name: timestamps_reference_time + description: 'Date and time corresponding to time zero of all timestamps. + The date is stored in UTC with local timezone offset as ISO 8601 extended + formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end + in "Z" with no timezone offset. Date accuracy is up to milliseconds. All + times stored in the file use this time as reference (i.e., time zero).' + multivalued: false + range: NWBFile_timestamps_reference_time + required: true + acquisition: + name: acquisition + description: Data streams recorded from the system, including ephys, ophys, + tracking, etc. This group should be read-only after the experiment is completed + and timestamps are corrected to a common timebase. The data stored here + may be links to raw data stored in external NWB files. This will allow keeping + bulky raw data out of the file while preserving the option of keeping some/all + in the file. Acquired data includes tracking and experimental data streams + (i.e., everything measured from the system). If bulky data is stored in + the /acquisition group, the data can exist in a separate NWB file that is + linked to by the file being used for processing and analysis. + multivalued: false + range: NWBFile_acquisition + required: true + analysis: + name: analysis + description: Lab-specific and custom scientific analysis of data. There is + no defined format for the content of this group - the format is up to the + individual user/lab. To facilitate sharing analysis data between labs, the + contents here should be stored in standard types (e.g., neurodata_types) + and appropriately documented. The file can store lab-specific and custom + data analysis without restriction on its form or schema, reducing data formatting + restrictions on end users. Such data should be placed in the analysis group. + The analysis data should be documented so that it could be shared with other + labs. + multivalued: false + range: NWBFile_analysis + required: true + scratch: + name: scratch + description: A place to store one-off analysis results. Data placed here is + not intended for sharing. By placing data here, users acknowledge that there + is no guarantee that their data meets any standard. + multivalued: false + range: NWBFile_scratch + required: false + processing: + name: processing + description: The home for ProcessingModules. These modules perform intermediate + analysis of data that is necessary to perform before scientific analysis. + Examples include spike clustering, extracting position from tracking data, + stitching together image slices. ProcessingModules can be large and express + many data sets from relatively complex analysis (e.g., spike detection and + clustering) or small, representing extraction of position information from + tracking video, or even binary lick/no-lick decisions. Common software tools + (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' + refers to intermediate analysis of the acquired data to make it more amenable + to scientific analysis. + multivalued: false + range: NWBFile_processing + required: true + stimulus: + name: stimulus + description: Data pushed into the system (eg, video stimulus, sound, voltage, + etc) and secondary representations of that data (eg, measurements of something + used as a stimulus). 
This group should be made read-only after experiment + complete and timestamps are corrected to common timebase. Stores both presented + stimuli and stimulus templates, the latter in case the same stimulus is + presented multiple times, or is pulled from an external stimulus library. + Stimuli are here defined as any signal that is pushed into the system as + part of the experiment (eg, sound, video, voltage, etc). Many different + experiments can use the same stimuli, and stimuli can be re-used during + an experiment. The stimulus group is organized so that one version of template + stimuli can be stored and these be used multiple times. These templates + can exist in the present file or can be linked to a remote library file. + multivalued: false + range: NWBFile_stimulus + required: true + general: + name: general + description: Experimental metadata, including protocol, notes and description + of hardware device(s). The metadata stored in this section should be used + to describe the experiment. Metadata necessary for interpreting the data + is stored with the data. General experimental metadata, including animal + strain, experimental protocols, experimenter, devices, etc, are stored under + 'general'. Core metadata (e.g., that required to interpret data fields) + is stored with the data itself, and implicitly defined by the file specification + (e.g., time is in seconds). The strategy used here for storing non-core + metadata is to use free-form text fields, such as would appear in sentences + or paragraphs from a Methods section. Metadata fields are text to enable + them to be more general, for example to represent ranges instead of numerical + values. Machine-readable metadata is stored as attributes to these free-form + datasets. All entries in the below table are to be included when data is + present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) + should not be created unless there is data to store within them. + multivalued: false + range: NWBFile_general + required: true + intervals: + name: intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during + an experiment, or epochs (see epochs subgroup) deriving from analysis of + data. + multivalued: false + range: NWBFile_intervals + required: false + units: + name: units + description: Data about sorted spike units. + multivalued: false + range: NWBFile_units + required: false + NWBFile_file_create_date: + name: NWBFile_file_create_date + description: 'A record of the date the file was created and of subsequent modifications. + The date is stored in UTC with local timezone offset as ISO 8601 extended formatted + strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with + no timezone offset. Date accuracy is up to milliseconds. The file can be created + after the experiment was run, so this may differ from the experiment start time. + Each modification to the nwb file adds a new entry to the array.' + attributes: + array: + name: array + range: NWBFile_file_create_date_Array + NWBFile_file_create_date_Array: + name: NWBFile_file_create_date_Array + is_a: Arraylike + attributes: + num_modifications: + name: num_modifications + range: isodatetime + required: true + NWBFile_identifier: + name: NWBFile_identifier + description: A unique text identifier for the file. 
For example, concatenated + lab name, file creation date/time and experimentalist, or a hash of these and/or + other values. The goal is that the string should be unique to all other files. + NWBFile_session_description: + name: NWBFile_session_description + description: A description of the experimental session and data in the file. + NWBFile_session_start_time: + name: NWBFile_session_start_time + description: 'Date and time of the experiment/session start. The date is stored + in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. + Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up + to milliseconds.' + NWBFile_timestamps_reference_time: + name: NWBFile_timestamps_reference_time + description: 'Date and time corresponding to time zero of all timestamps. The + date is stored in UTC with local timezone offset as ISO 8601 extended formatted + string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no + timezone offset. Date accuracy is up to milliseconds. All times stored in the + file use this time as reference (i.e., time zero).' + NWBFile_acquisition: + name: NWBFile_acquisition + description: Data streams recorded from the system, including ephys, ophys, tracking, + etc. This group should be read-only after the experiment is completed and timestamps + are corrected to a common timebase. The data stored here may be links to raw + data stored in external NWB files. This will allow keeping bulky raw data out + of the file while preserving the option of keeping some/all in the file. Acquired + data includes tracking and experimental data streams (i.e., everything measured + from the system). If bulky data is stored in the /acquisition group, the data + can exist in a separate NWB file that is linked to by the file being used for + processing and analysis. + attributes: + NWBDataInterface: + name: NWBDataInterface + description: Acquired, raw data. + multivalued: true + range: NWBDataInterface + required: false + DynamicTable: + name: DynamicTable + description: Tabular data that is relevant to acquisition + multivalued: true + range: DynamicTable + required: false + NWBFile_analysis: + name: NWBFile_analysis + description: Lab-specific and custom scientific analysis of data. There is no + defined format for the content of this group - the format is up to the individual + user/lab. To facilitate sharing analysis data between labs, the contents here + should be stored in standard types (e.g., neurodata_types) and appropriately + documented. The file can store lab-specific and custom data analysis without + restriction on its form or schema, reducing data formatting restrictions on + end users. Such data should be placed in the analysis group. The analysis data + should be documented so that it could be shared with other labs. + attributes: + NWBContainer: + name: NWBContainer + description: Custom analysis results. + multivalued: true + range: NWBContainer + required: false + DynamicTable: + name: DynamicTable + description: Tabular data that is relevant to data stored in analysis + multivalued: true + range: DynamicTable + required: false + NWBFile_scratch: + name: NWBFile_scratch + description: A place to store one-off analysis results. Data placed here is not + intended for sharing. By placing data here, users acknowledge that there is + no guarantee that their data meets any standard. 
+ attributes: + ScratchData: + name: ScratchData + description: Any one-off datasets + multivalued: true + range: ScratchData + required: false + NWBContainer: + name: NWBContainer + description: Any one-off containers + multivalued: true + range: NWBContainer + required: false + DynamicTable: + name: DynamicTable + description: Any one-off tables + multivalued: true + range: DynamicTable + required: false + NWBFile_processing: + name: NWBFile_processing + description: The home for ProcessingModules. These modules perform intermediate + analysis of data that is necessary to perform before scientific analysis. Examples + include spike clustering, extracting position from tracking data, stitching + together image slices. ProcessingModules can be large and express many data + sets from relatively complex analysis (e.g., spike detection and clustering) + or small, representing extraction of position information from tracking video, + or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, + MClust) are expected to read/write data here. 'Processing' refers to intermediate + analysis of the acquired data to make it more amenable to scientific analysis. + attributes: + ProcessingModule: + name: ProcessingModule + description: Intermediate analysis of acquired data. + multivalued: true + range: ProcessingModule + required: false + NWBFile_stimulus: + name: NWBFile_stimulus + description: Data pushed into the system (eg, video stimulus, sound, voltage, + etc) and secondary representations of that data (eg, measurements of something + used as a stimulus). This group should be made read-only after experiment complete + and timestamps are corrected to common timebase. Stores both presented stimuli + and stimulus templates, the latter in case the same stimulus is presented multiple + times, or is pulled from an external stimulus library. Stimuli are here defined + as any signal that is pushed into the system as part of the experiment (eg, + sound, video, voltage, etc). Many different experiments can use the same stimuli, + and stimuli can be re-used during an experiment. The stimulus group is organized + so that one version of template stimuli can be stored and these be used multiple + times. These templates can exist in the present file or can be linked to a remote + library file. + attributes: + presentation: + name: presentation + description: Stimuli presented during the experiment. + multivalued: false + range: NWBFile_stimulus_presentation + required: true + templates: + name: templates + description: Template stimuli. Timestamps in templates are based on stimulus + design and are relative to the beginning of the stimulus. When templates + are used, the stimulus instances must convert presentation times to the + experiment`s time reference frame. + multivalued: false + range: NWBFile_stimulus_templates + required: true + NWBFile_stimulus_presentation: + name: NWBFile_stimulus_presentation + description: Stimuli presented during the experiment. + attributes: + TimeSeries: + name: TimeSeries + description: TimeSeries objects containing data of presented stimuli. + multivalued: true + range: TimeSeries + required: false + NWBFile_stimulus_templates: + name: NWBFile_stimulus_templates + description: Template stimuli. Timestamps in templates are based on stimulus design + and are relative to the beginning of the stimulus. When templates are used, + the stimulus instances must convert presentation times to the experiment`s time + reference frame. 
+ attributes: + TimeSeries: + name: TimeSeries + description: TimeSeries objects containing template data of presented stimuli. + multivalued: true + range: TimeSeries + required: false + Images: + name: Images + description: Images objects containing images of presented stimuli. + multivalued: true + range: Images + required: false + NWBFile_general: + name: NWBFile_general + description: Experimental metadata, including protocol, notes and description + of hardware device(s). The metadata stored in this section should be used to + describe the experiment. Metadata necessary for interpreting the data is stored + with the data. General experimental metadata, including animal strain, experimental + protocols, experimenter, devices, etc, are stored under 'general'. Core metadata + (e.g., that required to interpret data fields) is stored with the data itself, + and implicitly defined by the file specification (e.g., time is in seconds). + The strategy used here for storing non-core metadata is to use free-form text + fields, such as would appear in sentences or paragraphs from a Methods section. + Metadata fields are text to enable them to be more general, for example to represent + ranges instead of numerical values. Machine-readable metadata is stored as attributes + to these free-form datasets. All entries in the below table are to be included + when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology + experiment) should not be created unless there is data to store within them. + attributes: + data_collection: + name: data_collection + description: Notes about data collection and analysis. + multivalued: false + range: NWBFile_general_data_collection + required: false + experiment_description: + name: experiment_description + description: General description of the experiment. + multivalued: false + range: NWBFile_general_experiment_description + required: false + experimenter: + name: experimenter + description: Name of person(s) who performed the experiment. Can also specify + roles of different people involved. + multivalued: false + range: NWBFile_general_experimenter + required: false + institution: + name: institution + description: Institution(s) where experiment was performed. + multivalued: false + range: NWBFile_general_institution + required: false + keywords: + name: keywords + description: Terms to search over. + multivalued: false + range: NWBFile_general_keywords + required: false + lab: + name: lab + description: Laboratory where experiment was performed. + multivalued: false + range: NWBFile_general_lab + required: false + notes: + name: notes + description: Notes about the experiment. + multivalued: false + range: NWBFile_general_notes + required: false + pharmacology: + name: pharmacology + description: Description of drugs used, including how and when they were administered. + Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + multivalued: false + range: NWBFile_general_pharmacology + required: false + protocol: + name: protocol + description: Experimental protocol, if applicable. e.g., include IACUC protocol + number. + multivalued: false + range: NWBFile_general_protocol + required: false + related_publications: + name: related_publications + description: Publication information. PMID, DOI, URL, etc. + multivalued: false + range: NWBFile_general_related_publications + required: false + session_id: + name: session_id + description: Lab-specific ID for the session. 
+ multivalued: false + range: NWBFile_general_session_id + required: false + slices: + name: slices + description: Description of slices, including information about preparation + thickness, orientation, temperature, and bath solution. + multivalued: false + range: NWBFile_general_slices + required: false + source_script: + name: source_script + description: Script file or link to public source code used to create this + NWB file. + multivalued: false + range: NWBFile_general_source_script + required: false + stimulus: + name: stimulus + description: Notes about stimuli, such as how and where they were presented. + multivalued: false + range: NWBFile_general_stimulus + required: false + surgery: + name: surgery + description: Narrative description about surgery/surgeries, including date(s) + and who performed surgery. + multivalued: false + range: NWBFile_general_surgery + required: false + virus: + name: virus + description: Information about virus(es) used in experiments, including virus + ID, source, date made, injection location, volume, etc. + multivalued: false + range: NWBFile_general_virus + required: false + LabMetaData: + name: LabMetaData + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + multivalued: true + range: LabMetaData + required: false + devices: + name: devices + description: Description of hardware devices used during experiment, e.g., + monitors, ADC boards, microscopes, etc. + multivalued: false + range: NWBFile_general_devices + required: false + subject: + name: subject + description: Information about the animal or person from which the data was + measured. + multivalued: false + range: NWBFile_general_subject + required: false + extracellular_ephys: + name: extracellular_ephys + description: Metadata related to extracellular electrophysiology. + multivalued: false + range: NWBFile_general_extracellular_ephys + required: false + intracellular_ephys: + name: intracellular_ephys + description: Metadata related to intracellular electrophysiology. + multivalued: false + range: NWBFile_general_intracellular_ephys + required: false + optogenetics: + name: optogenetics + description: Metadata describing optogenetic stimuluation. + multivalued: false + range: NWBFile_general_optogenetics + required: false + optophysiology: + name: optophysiology + description: Metadata related to optophysiology. + multivalued: false + range: NWBFile_general_optophysiology + required: false + NWBFile_general_data_collection: + name: NWBFile_general_data_collection + description: Notes about data collection and analysis. + NWBFile_general_experiment_description: + name: NWBFile_general_experiment_description + description: General description of the experiment. + NWBFile_general_experimenter: + name: NWBFile_general_experimenter + description: Name of person(s) who performed the experiment. Can also specify + roles of different people involved. + attributes: + array: + name: array + range: NWBFile_general_experimenter_Array + NWBFile_general_experimenter_Array: + name: NWBFile_general_experimenter_Array + is_a: Arraylike + attributes: + num_experimenters: + name: num_experimenters + range: text + required: true + NWBFile_general_institution: + name: NWBFile_general_institution + description: Institution(s) where experiment was performed. + NWBFile_general_keywords: + name: NWBFile_general_keywords + description: Terms to search over. 
+ attributes: + array: + name: array + range: NWBFile_general_keywords_Array + NWBFile_general_keywords_Array: + name: NWBFile_general_keywords_Array + is_a: Arraylike + attributes: + num_keywords: + name: num_keywords + range: text + required: true + NWBFile_general_lab: + name: NWBFile_general_lab + description: Laboratory where experiment was performed. + NWBFile_general_notes: + name: NWBFile_general_notes + description: Notes about the experiment. + NWBFile_general_pharmacology: + name: NWBFile_general_pharmacology + description: Description of drugs used, including how and when they were administered. + Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + NWBFile_general_protocol: + name: NWBFile_general_protocol + description: Experimental protocol, if applicable. e.g., include IACUC protocol + number. + NWBFile_general_related_publications: + name: NWBFile_general_related_publications + description: Publication information. PMID, DOI, URL, etc. + attributes: + array: + name: array + range: NWBFile_general_related_publications_Array + NWBFile_general_related_publications_Array: + name: NWBFile_general_related_publications_Array + is_a: Arraylike + attributes: + num_publications: + name: num_publications + range: text + required: true + NWBFile_general_session_id: + name: NWBFile_general_session_id + description: Lab-specific ID for the session. + NWBFile_general_slices: + name: NWBFile_general_slices + description: Description of slices, including information about preparation thickness, + orientation, temperature, and bath solution. + NWBFile_general_source_script: + name: NWBFile_general_source_script + description: Script file or link to public source code used to create this NWB + file. + attributes: + file_name: + name: file_name + description: Name of script file. + range: text + NWBFile_general_stimulus: + name: NWBFile_general_stimulus + description: Notes about stimuli, such as how and where they were presented. + NWBFile_general_surgery: + name: NWBFile_general_surgery + description: Narrative description about surgery/surgeries, including date(s) + and who performed surgery. + NWBFile_general_virus: + name: NWBFile_general_virus + description: Information about virus(es) used in experiments, including virus + ID, source, date made, injection location, volume, etc. + NWBFile_general_devices: + name: NWBFile_general_devices + description: Description of hardware devices used during experiment, e.g., monitors, + ADC boards, microscopes, etc. + attributes: + Device: + name: Device + description: Data acquisition devices. + multivalued: true + range: Device + required: false + NWBFile_general_subject: + name: NWBFile_general_subject + description: Information about the animal or person from which the data was measured. + is_a: Subject + NWBFile_general_extracellular_ephys: + name: NWBFile_general_extracellular_ephys + description: Metadata related to extracellular electrophysiology. + attributes: + ElectrodeGroup: + name: ElectrodeGroup + description: Physical group of electrodes. + multivalued: true + range: ElectrodeGroup + required: false + electrodes: + name: electrodes + description: A table of all electrodes (i.e. channels) used for recording. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes + required: false + NWBFile_general_extracellular_ephys_electrodes: + name: NWBFile_general_extracellular_ephys_electrodes + description: A table of all electrodes (i.e. channels) used for recording. 
+ is_a: DynamicTable + attributes: + x: + name: x + description: x coordinate of the channel location in the brain (+x is posterior). + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_x + required: false + y: + name: y + description: y coordinate of the channel location in the brain (+y is inferior). + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_y + required: false + z: + name: z + description: z coordinate of the channel location in the brain (+z is right). + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_z + required: false + imp: + name: imp + description: Impedance of the channel, in ohms. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_imp + required: false + location: + name: location + description: Location of the electrode (channel). Specify the area, layer, + comments on estimation of area/layer, stereotaxic coordinates if in vivo, + etc. Use standard atlas names for anatomical regions when possible. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_location + required: true + filtering: + name: filtering + description: Description of hardware filtering, including the filter name + and frequency cutoffs. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_filtering + required: false + group: + name: group + description: Reference to the ElectrodeGroup this electrode is a part of. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_group + required: true + group_name: + name: group_name + description: Name of the ElectrodeGroup this electrode is a part of. + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_group_name + required: true + rel_x: + name: rel_x + description: x coordinate in electrode group + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_rel_x + required: false + rel_y: + name: rel_y + description: y coordinate in electrode group + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_rel_y + required: false + rel_z: + name: rel_z + description: z coordinate in electrode group + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_rel_z + required: false + reference: + name: reference + description: Description of the reference electrode and/or reference scheme + used for this electrode, e.g., "stainless steel skull screw" or "online + common average referencing". + multivalued: false + range: NWBFile_general_extracellular_ephys_electrodes_reference + required: false + NWBFile_general_extracellular_ephys_electrodes_x: + name: NWBFile_general_extracellular_ephys_electrodes_x + description: x coordinate of the channel location in the brain (+x is posterior). + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_y: + name: NWBFile_general_extracellular_ephys_electrodes_y + description: y coordinate of the channel location in the brain (+y is inferior). + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_z: + name: NWBFile_general_extracellular_ephys_electrodes_z + description: z coordinate of the channel location in the brain (+z is right). + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_imp: + name: NWBFile_general_extracellular_ephys_electrodes_imp + description: Impedance of the channel, in ohms. 
+ is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_location: + name: NWBFile_general_extracellular_ephys_electrodes_location + description: Location of the electrode (channel). Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard + atlas names for anatomical regions when possible. + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_filtering: + name: NWBFile_general_extracellular_ephys_electrodes_filtering + description: Description of hardware filtering, including the filter name and + frequency cutoffs. + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_group: + name: NWBFile_general_extracellular_ephys_electrodes_group + description: Reference to the ElectrodeGroup this electrode is a part of. + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_group_name: + name: NWBFile_general_extracellular_ephys_electrodes_group_name + description: Name of the ElectrodeGroup this electrode is a part of. + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_rel_x: + name: NWBFile_general_extracellular_ephys_electrodes_rel_x + description: x coordinate in electrode group + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_rel_y: + name: NWBFile_general_extracellular_ephys_electrodes_rel_y + description: y coordinate in electrode group + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_rel_z: + name: NWBFile_general_extracellular_ephys_electrodes_rel_z + description: z coordinate in electrode group + is_a: VectorData + NWBFile_general_extracellular_ephys_electrodes_reference: + name: NWBFile_general_extracellular_ephys_electrodes_reference + description: Description of the reference electrode and/or reference scheme used + for this electrode, e.g., "stainless steel skull screw" or "online common average + referencing". + is_a: VectorData + NWBFile_general_intracellular_ephys: + name: NWBFile_general_intracellular_ephys + description: Metadata related to intracellular electrophysiology. + attributes: + filtering: + name: filtering + description: '[DEPRECATED] Use IntracellularElectrode.filtering instead. Description + of filtering used. Includes filtering type and parameters, frequency fall-off, + etc. If this changes between TimeSeries, filter description should be stored + as a text attribute for each TimeSeries.' + multivalued: false + range: NWBFile_general_intracellular_ephys_filtering + required: false + IntracellularElectrode: + name: IntracellularElectrode + description: An intracellular electrode. + multivalued: true + range: IntracellularElectrode + required: false + sweep_table: + name: sweep_table + description: '[DEPRECATED] Table used to group different PatchClampSeries. + SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions + tables provide enhanced support for experiment metadata.' + multivalued: false + range: NWBFile_general_intracellular_ephys_sweep_table + required: false + intracellular_recordings: + name: intracellular_recordings + description: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording. Each row in the table represents + a single recording consisting typically of a stimulus and a corresponding + response. In some cases, however, only a stimulus or a response are recorded + as as part of an experiment. 
In this case both, the stimulus and response + will point to the same TimeSeries while the idx_start and count of the invalid + column will be set to -1, thus, indicating that no values have been recorded + for the stimulus or response, respectively. Note, a recording MUST contain + at least a stimulus or a response. Typically the stimulus and response are + PatchClampSeries. However, the use of AD/DA channels that are not associated + to an electrode is also common in intracellular electrophysiology, in which + case other TimeSeries may be used. + multivalued: false + range: NWBFile_general_intracellular_ephys_intracellular_recordings + required: false + simultaneous_recordings: + name: simultaneous_recordings + description: A table for grouping different intracellular recordings from + the IntracellularRecordingsTable table together that were recorded simultaneously + from different electrodes + multivalued: false + range: NWBFile_general_intracellular_ephys_simultaneous_recordings + required: false + sequential_recordings: + name: sequential_recordings + description: A table for grouping different sequential recordings from the + SimultaneousRecordingsTable table together. This is typically used to group + together sequential recordings where the a sequence of stimuli of the same + type with varying parameters have been presented in a sequence. + multivalued: false + range: NWBFile_general_intracellular_ephys_sequential_recordings + required: false + repetitions: + name: repetitions + description: A table for grouping different sequential intracellular recordings + together. With each SequentialRecording typically representing a particular + type of stimulus, the RepetitionsTable table is typically used to group + sets of stimuli applied in sequence. + multivalued: false + range: NWBFile_general_intracellular_ephys_repetitions + required: false + experimental_conditions: + name: experimental_conditions + description: A table for grouping different intracellular recording repetitions + together that belong to the same experimental experimental_conditions. + multivalued: false + range: NWBFile_general_intracellular_ephys_experimental_conditions + required: false + NWBFile_general_intracellular_ephys_filtering: + name: NWBFile_general_intracellular_ephys_filtering + description: '[DEPRECATED] Use IntracellularElectrode.filtering instead. Description + of filtering used. Includes filtering type and parameters, frequency fall-off, + etc. If this changes between TimeSeries, filter description should be stored + as a text attribute for each TimeSeries.' + NWBFile_general_intracellular_ephys_sweep_table: + name: NWBFile_general_intracellular_ephys_sweep_table + description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable + is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions + tables provide enhanced support for experiment metadata.' + is_a: SweepTable + NWBFile_general_intracellular_ephys_intracellular_recordings: + name: NWBFile_general_intracellular_ephys_intracellular_recordings + description: A table to group together a stimulus and response from a single electrode + and a single simultaneous recording. Each row in the table represents a single + recording consisting typically of a stimulus and a corresponding response. In + some cases, however, only a stimulus or a response are recorded as as part of + an experiment. 
In this case both, the stimulus and response will point to the + same TimeSeries while the idx_start and count of the invalid column will be + set to -1, thus, indicating that no values have been recorded for the stimulus + or response, respectively. Note, a recording MUST contain at least a stimulus + or a response. Typically the stimulus and response are PatchClampSeries. However, + the use of AD/DA channels that are not associated to an electrode is also common + in intracellular electrophysiology, in which case other TimeSeries may be used. + is_a: IntracellularRecordingsTable + NWBFile_general_intracellular_ephys_simultaneous_recordings: + name: NWBFile_general_intracellular_ephys_simultaneous_recordings + description: A table for grouping different intracellular recordings from the + IntracellularRecordingsTable table together that were recorded simultaneously + from different electrodes + is_a: SimultaneousRecordingsTable + NWBFile_general_intracellular_ephys_sequential_recordings: + name: NWBFile_general_intracellular_ephys_sequential_recordings + description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable + table together. This is typically used to group together sequential recordings + where the a sequence of stimuli of the same type with varying parameters have + been presented in a sequence. + is_a: SequentialRecordingsTable + NWBFile_general_intracellular_ephys_repetitions: + name: NWBFile_general_intracellular_ephys_repetitions + description: A table for grouping different sequential intracellular recordings + together. With each SequentialRecording typically representing a particular + type of stimulus, the RepetitionsTable table is typically used to group sets + of stimuli applied in sequence. + is_a: RepetitionsTable + NWBFile_general_intracellular_ephys_experimental_conditions: + name: NWBFile_general_intracellular_ephys_experimental_conditions + description: A table for grouping different intracellular recording repetitions + together that belong to the same experimental experimental_conditions. + is_a: ExperimentalConditionsTable + NWBFile_general_optogenetics: + name: NWBFile_general_optogenetics + description: Metadata describing optogenetic stimuluation. + attributes: + OptogeneticStimulusSite: + name: OptogeneticStimulusSite + description: An optogenetic stimulation site. + multivalued: true + range: OptogeneticStimulusSite + required: false + NWBFile_general_optophysiology: + name: NWBFile_general_optophysiology + description: Metadata related to optophysiology. + attributes: + ImagingPlane: + name: ImagingPlane + description: An imaging plane. + multivalued: true + range: ImagingPlane + required: false + NWBFile_intervals: + name: NWBFile_intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + multivalued: false + range: NWBFile_intervals_epochs + required: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + multivalued: false + range: NWBFile_intervals_trials + required: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. 
+ multivalued: false + range: NWBFile_intervals_invalid_times + required: false + TimeIntervals: + name: TimeIntervals + description: Optional additional table(s) for describing other experimental + time intervals. + multivalued: true + range: TimeIntervals + required: false + NWBFile_intervals_epochs: + name: NWBFile_intervals_epochs + description: Divisions in time marking experimental stages or sub-divisions of + a single recording session. + is_a: TimeIntervals + NWBFile_intervals_trials: + name: NWBFile_intervals_trials + description: Repeated experimental events that have a logical grouping. + is_a: TimeIntervals + NWBFile_intervals_invalid_times: + name: NWBFile_intervals_invalid_times + description: Time intervals that should be removed from analysis. + is_a: TimeIntervals + NWBFile_units: + name: NWBFile_units + description: Data about sorted spike units. + is_a: Units + LabMetaData: + name: LabMetaData + description: Lab-specific meta-data. + is_a: NWBContainer + Subject: + name: Subject + description: Information about the animal or person from which the data was measured. + is_a: NWBContainer + attributes: + age: + name: age + description: Age of subject. Can be supplied instead of 'date_of_birth'. + multivalued: false + range: Subject_age + required: false + date_of_birth: + name: date_of_birth + description: Date of birth of subject. Can be supplied instead of 'age'. + multivalued: false + range: Subject_date_of_birth + required: false + description: + name: description + description: Description of subject and where subject came from (e.g., breeder, + if animal). + multivalued: false + range: Subject_description + required: false + genotype: + name: genotype + description: Genetic strain. If absent, assume Wild Type (WT). + multivalued: false + range: Subject_genotype + required: false + sex: + name: sex + description: Gender of subject. + multivalued: false + range: Subject_sex + required: false + species: + name: species + description: Species of subject. + multivalued: false + range: Subject_species + required: false + strain: + name: strain + description: Strain of subject. + multivalued: false + range: Subject_strain + required: false + subject_id: + name: subject_id + description: ID of animal/person used/participating in experiment (lab convention). + multivalued: false + range: Subject_subject_id + required: false + weight: + name: weight + description: Weight at time of experiment, at time of surgery and at other + important times. + multivalued: false + range: Subject_weight + required: false + Subject_age: + name: Subject_age + description: Age of subject. Can be supplied instead of 'date_of_birth'. + attributes: + reference: + name: reference + description: Age is with reference to this event. Can be 'birth' or 'gestational'. + If reference is omitted, 'birth' is implied. + range: text + Subject_date_of_birth: + name: Subject_date_of_birth + description: Date of birth of subject. Can be supplied instead of 'age'. + Subject_description: + name: Subject_description + description: Description of subject and where subject came from (e.g., breeder, + if animal). + Subject_genotype: + name: Subject_genotype + description: Genetic strain. If absent, assume Wild Type (WT). + Subject_sex: + name: Subject_sex + description: Gender of subject. + Subject_species: + name: Subject_species + description: Species of subject. + Subject_strain: + name: Subject_strain + description: Strain of subject. 
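+  # The scalar fields of Subject (age, sex, weight, ...) are each modeled as their own
+  # class named Subject_<field> (e.g. Subject_age above, Subject_subject_id below), so
+  # that dataset-level metadata such as the 'reference' attribute on Subject_age has
+  # somewhere to live; the parent slot's range then points at that class rather than at
+  # a plain text type.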
+ Subject_subject_id: + name: Subject_subject_id + description: ID of animal/person used/participating in experiment (lab convention). + Subject_weight: + name: Subject_weight + description: Weight at time of experiment, at time of surgery and at other important + times. diff --git a/nwb_linkml/schema/core.nwb.icephys.yaml b/nwb_linkml/schema/core.nwb.icephys.yaml new file mode 100644 index 0000000..019cd8a --- /dev/null +++ b/nwb_linkml/schema/core.nwb.icephys.yaml @@ -0,0 +1,704 @@ +name: core.nwb.icephys +id: core.nwb.icephys +imports: +- core.nwb.base +- hdmf-common.table +- nwb.language +default_prefix: core.nwb.icephys/ +classes: + PatchClampSeries: + name: PatchClampSeries + description: An abstract base class for patch-clamp data - stimulus or response, + current or voltage. + is_a: TimeSeries + attributes: + stimulus_description: + name: stimulus_description + description: Protocol/stimulus name for this patch-clamp dataset. + range: text + sweep_number: + name: sweep_number + description: Sweep number, allows to group different PatchClampSeries together. + range: uint32 + data: + name: data + description: Recorded voltage or current. + multivalued: false + range: PatchClampSeries_data + required: true + gain: + name: gain + description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt + (c-clamp). + multivalued: false + range: PatchClampSeries_gain + required: false + PatchClampSeries_data: + name: PatchClampSeries_data + description: Recorded voltage or current. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + array: + name: array + range: PatchClampSeries_data_Array + PatchClampSeries_data_Array: + name: PatchClampSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + PatchClampSeries_gain: + name: PatchClampSeries_gain + description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + CurrentClampSeries: + name: CurrentClampSeries + description: Voltage data from an intracellular current-clamp recording. A corresponding + CurrentClampStimulusSeries (stored separately as a stimulus) is used to store + the current injected. + is_a: PatchClampSeries + attributes: + data: + name: data + description: Recorded voltage. + multivalued: false + range: CurrentClampSeries_data + required: true + bias_current: + name: bias_current + description: Bias current, in amps. + multivalued: false + range: CurrentClampSeries_bias_current + required: false + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + multivalued: false + range: CurrentClampSeries_bridge_balance + required: false + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + multivalued: false + range: CurrentClampSeries_capacitance_compensation + required: false + CurrentClampSeries_data: + name: CurrentClampSeries_data + description: Recorded voltage. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'volts'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. 
+ range: text + CurrentClampSeries_bias_current: + name: CurrentClampSeries_bias_current + description: Bias current, in amps. + CurrentClampSeries_bridge_balance: + name: CurrentClampSeries_bridge_balance + description: Bridge balance, in ohms. + CurrentClampSeries_capacitance_compensation: + name: CurrentClampSeries_capacitance_compensation + description: Capacitance compensation, in farads. + IZeroClampSeries: + name: IZeroClampSeries + description: Voltage data from an intracellular recording when all current and + amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There + is no CurrentClampStimulusSeries associated with an IZero series because the + amplifier is disconnected and no stimulus can reach the cell. + is_a: CurrentClampSeries + attributes: + stimulus_description: + name: stimulus_description + description: An IZeroClampSeries has no stimulus, so this attribute is automatically + set to "N/A" + range: text + bias_current: + name: bias_current + description: Bias current, in amps, fixed to 0.0. + multivalued: false + range: IZeroClampSeries_bias_current + required: true + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms, fixed to 0.0. + multivalued: false + range: IZeroClampSeries_bridge_balance + required: true + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads, fixed to 0.0. + multivalued: false + range: IZeroClampSeries_capacitance_compensation + required: true + IZeroClampSeries_bias_current: + name: IZeroClampSeries_bias_current + description: Bias current, in amps, fixed to 0.0. + IZeroClampSeries_bridge_balance: + name: IZeroClampSeries_bridge_balance + description: Bridge balance, in ohms, fixed to 0.0. + IZeroClampSeries_capacitance_compensation: + name: IZeroClampSeries_capacitance_compensation + description: Capacitance compensation, in farads, fixed to 0.0. + CurrentClampStimulusSeries: + name: CurrentClampStimulusSeries + description: Stimulus current applied during current clamp recording. + is_a: PatchClampSeries + attributes: + data: + name: data + description: Stimulus current applied. + multivalued: false + range: CurrentClampStimulusSeries_data + required: true + CurrentClampStimulusSeries_data: + name: CurrentClampStimulusSeries_data + description: Stimulus current applied. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + VoltageClampSeries: + name: VoltageClampSeries + description: Current data from an intracellular voltage-clamp recording. A corresponding + VoltageClampStimulusSeries (stored separately as a stimulus) is used to store + the voltage injected. + is_a: PatchClampSeries + attributes: + data: + name: data + description: Recorded current. + multivalued: false + range: VoltageClampSeries_data + required: true + capacitance_fast: + name: capacitance_fast + description: Fast capacitance, in farads. + multivalued: false + range: VoltageClampSeries_capacitance_fast + required: false + capacitance_slow: + name: capacitance_slow + description: Slow capacitance, in farads. + multivalued: false + range: VoltageClampSeries_capacitance_slow + required: false + resistance_comp_bandwidth: + name: resistance_comp_bandwidth + description: Resistance compensation bandwidth, in hertz. 
+        multivalued: false
+        range: VoltageClampSeries_resistance_comp_bandwidth
+        required: false
+      resistance_comp_correction:
+        name: resistance_comp_correction
+        description: Resistance compensation correction, in percent.
+        multivalued: false
+        range: VoltageClampSeries_resistance_comp_correction
+        required: false
+      resistance_comp_prediction:
+        name: resistance_comp_prediction
+        description: Resistance compensation prediction, in percent.
+        multivalued: false
+        range: VoltageClampSeries_resistance_comp_prediction
+        required: false
+      whole_cell_capacitance_comp:
+        name: whole_cell_capacitance_comp
+        description: Whole cell capacitance compensation, in farads.
+        multivalued: false
+        range: VoltageClampSeries_whole_cell_capacitance_comp
+        required: false
+      whole_cell_series_resistance_comp:
+        name: whole_cell_series_resistance_comp
+        description: Whole cell series resistance compensation, in ohms.
+        multivalued: false
+        range: VoltageClampSeries_whole_cell_series_resistance_comp
+        required: false
+  VoltageClampSeries_data:
+    name: VoltageClampSeries_data
+    description: Recorded current.
+    attributes:
+      unit:
+        name: unit
+        description: Base unit of measurement for working with the data. which is
+          fixed to 'amperes'. Actual stored values are not necessarily stored in these
+          units. To access the data in these units, multiply 'data' by 'conversion'
+          and add 'offset'.
+        range: text
+  VoltageClampSeries_capacitance_fast:
+    name: VoltageClampSeries_capacitance_fast
+    description: Fast capacitance, in farads.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+        range: text
+  VoltageClampSeries_capacitance_slow:
+    name: VoltageClampSeries_capacitance_slow
+    description: Slow capacitance, in farads.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+        range: text
+  VoltageClampSeries_resistance_comp_bandwidth:
+    name: VoltageClampSeries_resistance_comp_bandwidth
+    description: Resistance compensation bandwidth, in hertz.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for resistance_comp_bandwidth, which is fixed
+          to 'hertz'.
+        range: text
+  VoltageClampSeries_resistance_comp_correction:
+    name: VoltageClampSeries_resistance_comp_correction
+    description: Resistance compensation correction, in percent.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for resistance_comp_correction, which is
+          fixed to 'percent'.
+        range: text
+  VoltageClampSeries_resistance_comp_prediction:
+    name: VoltageClampSeries_resistance_comp_prediction
+    description: Resistance compensation prediction, in percent.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for resistance_comp_prediction, which is
+          fixed to 'percent'.
+        range: text
+  VoltageClampSeries_whole_cell_capacitance_comp:
+    name: VoltageClampSeries_whole_cell_capacitance_comp
+    description: Whole cell capacitance compensation, in farads.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for whole_cell_capacitance_comp, which is
+          fixed to 'farads'.
+        range: text
+  VoltageClampSeries_whole_cell_series_resistance_comp:
+    name: VoltageClampSeries_whole_cell_series_resistance_comp
+    description: Whole cell series resistance compensation, in ohms.
+    attributes:
+      unit:
+        name: unit
+        description: Unit of measurement for whole_cell_series_resistance_comp, which
+          is fixed to 'ohms'. 
+ range: text + VoltageClampStimulusSeries: + name: VoltageClampStimulusSeries + description: Stimulus voltage applied during a voltage clamp recording. + is_a: PatchClampSeries + attributes: + data: + name: data + description: Stimulus voltage applied. + multivalued: false + range: VoltageClampStimulusSeries_data + required: true + VoltageClampStimulusSeries_data: + name: VoltageClampStimulusSeries_data + description: Stimulus voltage applied. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'volts'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + IntracellularElectrode: + name: IntracellularElectrode + description: An intracellular electrode and its metadata. + is_a: NWBContainer + attributes: + cell_id: + name: cell_id + description: unique ID of the cell + multivalued: false + range: IntracellularElectrode_cell_id + required: false + description: + name: description + description: Description of electrode (e.g., whole-cell, sharp, etc.). + multivalued: false + range: IntracellularElectrode_description + required: true + filtering: + name: filtering + description: Electrode specific filtering. + multivalued: false + range: IntracellularElectrode_filtering + required: false + initial_access_resistance: + name: initial_access_resistance + description: Initial access resistance. + multivalued: false + range: IntracellularElectrode_initial_access_resistance + required: false + location: + name: location + description: Location of the electrode. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + multivalued: false + range: IntracellularElectrode_location + required: false + resistance: + name: resistance + description: Electrode resistance, in ohms. + multivalued: false + range: IntracellularElectrode_resistance + required: false + seal: + name: seal + description: Information about seal used for recording. + multivalued: false + range: IntracellularElectrode_seal + required: false + slice: + name: slice + description: Information about slice used for recording. + multivalued: false + range: IntracellularElectrode_slice + required: false + IntracellularElectrode_cell_id: + name: IntracellularElectrode_cell_id + description: unique ID of the cell + IntracellularElectrode_description: + name: IntracellularElectrode_description + description: Description of electrode (e.g., whole-cell, sharp, etc.). + IntracellularElectrode_filtering: + name: IntracellularElectrode_filtering + description: Electrode specific filtering. + IntracellularElectrode_initial_access_resistance: + name: IntracellularElectrode_initial_access_resistance + description: Initial access resistance. + IntracellularElectrode_location: + name: IntracellularElectrode_location + description: Location of the electrode. Specify the area, layer, comments on estimation + of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names + for anatomical regions when possible. + IntracellularElectrode_resistance: + name: IntracellularElectrode_resistance + description: Electrode resistance, in ohms. + IntracellularElectrode_seal: + name: IntracellularElectrode_seal + description: Information about seal used for recording. 
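+  # The recording-grouping tables further below form a hierarchy: each row of
+  # IntracellularRecordingsTable holds one stimulus/response pair from a single electrode;
+  # SimultaneousRecordingsTable groups those rows, SequentialRecordingsTable groups
+  # simultaneous recordings, RepetitionsTable groups sequential recordings, and
+  # ExperimentalConditionsTable groups repetitions. Each level uses a DynamicTableRegion
+  # column plus a VectorIndex column; e.g. (illustrative values only) recordings =
+  # [0, 1, 2, 3, 4] with recordings_index = [2, 5] would put rows 0-1 in the first
+  # simultaneous recording and rows 2-4 in the second, following hdmf's convention of
+  # storing cumulative end indices in the index column.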
+ IntracellularElectrode_slice: + name: IntracellularElectrode_slice + description: Information about slice used for recording. + SweepTable: + name: SweepTable + description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable + is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions + tables provide enhanced support for experiment metadata.' + is_a: DynamicTable + attributes: + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. + multivalued: false + range: SweepTable_sweep_number + required: true + series: + name: series + description: The PatchClampSeries with the sweep number in that row. + multivalued: false + range: SweepTable_series + required: true + series_index: + name: series_index + description: Index for series. + multivalued: false + range: SweepTable_series_index + required: true + SweepTable_sweep_number: + name: SweepTable_sweep_number + description: Sweep number of the PatchClampSeries in that row. + is_a: VectorData + SweepTable_series: + name: SweepTable_series + description: The PatchClampSeries with the sweep number in that row. + is_a: VectorData + SweepTable_series_index: + name: SweepTable_series_index + description: Index for series. + is_a: VectorIndex + IntracellularElectrodesTable: + name: IntracellularElectrodesTable + description: Table for storing intracellular electrode related metadata. + is_a: DynamicTable + attributes: + description: + name: description + description: Description of what is in this dynamic table. + range: text + electrode: + name: electrode + description: Column for storing the reference to the intracellular electrode. + multivalued: false + range: IntracellularElectrodesTable_electrode + required: true + IntracellularElectrodesTable_electrode: + name: IntracellularElectrodesTable_electrode + description: Column for storing the reference to the intracellular electrode. + is_a: VectorData + IntracellularStimuliTable: + name: IntracellularStimuliTable + description: Table for storing intracellular stimulus related metadata. + is_a: DynamicTable + attributes: + description: + name: description + description: Description of what is in this dynamic table. + range: text + stimulus: + name: stimulus + description: Column storing the reference to the recorded stimulus for the + recording (rows). + multivalued: false + range: IntracellularStimuliTable_stimulus + required: true + IntracellularStimuliTable_stimulus: + name: IntracellularStimuliTable_stimulus + description: Column storing the reference to the recorded stimulus for the recording + (rows). + is_a: TimeSeriesReferenceVectorData + IntracellularResponsesTable: + name: IntracellularResponsesTable + description: Table for storing intracellular response related metadata. + is_a: DynamicTable + attributes: + description: + name: description + description: Description of what is in this dynamic table. 
+ range: text + response: + name: response + description: Column storing the reference to the recorded response for the + recording (rows) + multivalued: false + range: IntracellularResponsesTable_response + required: true + IntracellularResponsesTable_response: + name: IntracellularResponsesTable_response + description: Column storing the reference to the recorded response for the recording + (rows) + is_a: TimeSeriesReferenceVectorData + IntracellularRecordingsTable: + name: IntracellularRecordingsTable + description: A table to group together a stimulus and response from a single electrode + and a single simultaneous recording. Each row in the table represents a single + recording consisting typically of a stimulus and a corresponding response. In + some cases, however, only a stimulus or a response is recorded as part of an + experiment. In this case, both the stimulus and response will point to the same + TimeSeries while the idx_start and count of the invalid column will be set to + -1, thus, indicating that no values have been recorded for the stimulus or response, + respectively. Note, a recording MUST contain at least a stimulus or a response. + Typically the stimulus and response are PatchClampSeries. However, the use of + AD/DA channels that are not associated to an electrode is also common in intracellular + electrophysiology, in which case other TimeSeries may be used. + is_a: AlignedDynamicTable + attributes: + description: + name: description + description: Description of the contents of this table. Inherited from AlignedDynamicTable + and overwritten here to fix the value of the attribute. + range: text + electrodes: + name: electrodes + description: Table for storing intracellular electrode related metadata. + multivalued: false + range: IntracellularRecordingsTable_electrodes + required: true + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + multivalued: false + range: IntracellularRecordingsTable_stimuli + required: true + responses: + name: responses + description: Table for storing intracellular response related metadata. + multivalued: false + range: IntracellularRecordingsTable_responses + required: true + IntracellularRecordingsTable_electrodes: + name: IntracellularRecordingsTable_electrodes + description: Table for storing intracellular electrode related metadata. + is_a: IntracellularElectrodesTable + IntracellularRecordingsTable_stimuli: + name: IntracellularRecordingsTable_stimuli + description: Table for storing intracellular stimulus related metadata. + is_a: IntracellularStimuliTable + IntracellularRecordingsTable_responses: + name: IntracellularRecordingsTable_responses + description: Table for storing intracellular response related metadata. + is_a: IntracellularResponsesTable + SimultaneousRecordingsTable: + name: SimultaneousRecordingsTable + description: A table for grouping different intracellular recordings from the + IntracellularRecordingsTable table together that were recorded simultaneously + from different electrodes. + is_a: DynamicTable + attributes: + recordings: + name: recordings + description: A reference to one or more rows in the IntracellularRecordingsTable + table. + multivalued: false + range: SimultaneousRecordingsTable_recordings + required: true + recordings_index: + name: recordings_index + description: Index dataset for the recordings column. 
+ multivalued: false + range: SimultaneousRecordingsTable_recordings_index + required: true + SimultaneousRecordingsTable_recordings: + name: SimultaneousRecordingsTable_recordings + description: A reference to one or more rows in the IntracellularRecordingsTable + table. + is_a: DynamicTableRegion + attributes: + table: + name: table + description: Reference to the IntracellularRecordingsTable table that this + table region applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: IntracellularRecordingsTable + SimultaneousRecordingsTable_recordings_index: + name: SimultaneousRecordingsTable_recordings_index + description: Index dataset for the recordings column. + is_a: VectorIndex + SequentialRecordingsTable: + name: SequentialRecordingsTable + description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable + table together. This is typically used to group together sequential recordings + where a sequence of stimuli of the same type with varying parameters have been + presented in a sequence. + is_a: DynamicTable + attributes: + simultaneous_recordings: + name: simultaneous_recordings + description: A reference to one or more rows in the SimultaneousRecordingsTable + table. + multivalued: false + range: SequentialRecordingsTable_simultaneous_recordings + required: true + simultaneous_recordings_index: + name: simultaneous_recordings_index + description: Index dataset for the simultaneous_recordings column. + multivalued: false + range: SequentialRecordingsTable_simultaneous_recordings_index + required: true + stimulus_type: + name: stimulus_type + description: The type of stimulus used for the sequential recording. + multivalued: false + range: SequentialRecordingsTable_stimulus_type + required: true + SequentialRecordingsTable_simultaneous_recordings: + name: SequentialRecordingsTable_simultaneous_recordings + description: A reference to one or more rows in the SimultaneousRecordingsTable + table. + is_a: DynamicTableRegion + attributes: + table: + name: table + description: Reference to the SimultaneousRecordingsTable table that this + table region applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: SimultaneousRecordingsTable + SequentialRecordingsTable_simultaneous_recordings_index: + name: SequentialRecordingsTable_simultaneous_recordings_index + description: Index dataset for the simultaneous_recordings column. + is_a: VectorIndex + SequentialRecordingsTable_stimulus_type: + name: SequentialRecordingsTable_stimulus_type + description: The type of stimulus used for the sequential recording. + is_a: VectorData + RepetitionsTable: + name: RepetitionsTable + description: A table for grouping different sequential intracellular recordings + together. With each SequentialRecording typically representing a particular + type of stimulus, the RepetitionsTable table is typically used to group sets + of stimuli applied in sequence. + is_a: DynamicTable + attributes: + sequential_recordings: + name: sequential_recordings + description: A reference to one or more rows in the SequentialRecordingsTable + table. + multivalued: false + range: RepetitionsTable_sequential_recordings + required: true + sequential_recordings_index: + name: sequential_recordings_index + description: Index dataset for the sequential_recordings column. 
+ multivalued: false + range: RepetitionsTable_sequential_recordings_index + required: true + RepetitionsTable_sequential_recordings: + name: RepetitionsTable_sequential_recordings + description: A reference to one or more rows in the SequentialRecordingsTable + table. + is_a: DynamicTableRegion + attributes: + table: + name: table + description: Reference to the SequentialRecordingsTable table that this table + region applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: SequentialRecordingsTable + RepetitionsTable_sequential_recordings_index: + name: RepetitionsTable_sequential_recordings_index + description: Index dataset for the sequential_recordings column. + is_a: VectorIndex + ExperimentalConditionsTable: + name: ExperimentalConditionsTable + description: A table for grouping different intracellular recording repetitions + together that belong to the same experimental condition. + is_a: DynamicTable + attributes: + repetitions: + name: repetitions + description: A reference to one or more rows in the RepetitionsTable table. + multivalued: false + range: ExperimentalConditionsTable_repetitions + required: true + repetitions_index: + name: repetitions_index + description: Index dataset for the repetitions column. + multivalued: false + range: ExperimentalConditionsTable_repetitions_index + required: true + ExperimentalConditionsTable_repetitions: + name: ExperimentalConditionsTable_repetitions + description: A reference to one or more rows in the RepetitionsTable table. + is_a: DynamicTableRegion + attributes: + table: + name: table + description: Reference to the RepetitionsTable table that this table region + applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: RepetitionsTable + ExperimentalConditionsTable_repetitions_index: + name: ExperimentalConditionsTable_repetitions_index + description: Index dataset for the repetitions column. + is_a: VectorIndex diff --git a/nwb_linkml/schema/core.nwb.image.yaml b/nwb_linkml/schema/core.nwb.image.yaml new file mode 100644 index 0000000..16bf0c7 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.image.yaml @@ -0,0 +1,353 @@ +name: core.nwb.image +id: core.nwb.image +imports: +- core.nwb.base +- nwb.language +default_prefix: core.nwb.image/ +classes: + GrayscaleImage: + name: GrayscaleImage + description: A grayscale image. + is_a: Image + attributes: + array: + name: array + range: GrayscaleImage_Array + GrayscaleImage_Array: + name: GrayscaleImage_Array + is_a: Arraylike + attributes: + x: + name: x + range: numeric + required: false + y: + name: y + range: numeric + required: false + RGBImage: + name: RGBImage + description: A color image. + is_a: Image + attributes: + array: + name: array + range: RGBImage_Array + RGBImage_Array: + name: RGBImage_Array + is_a: Arraylike + attributes: + x: + name: x + range: numeric + required: false + y: + name: y + range: numeric + required: false + r, g, b: + name: r, g, b + range: numeric + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + RGBAImage: + name: RGBAImage + description: A color image with transparency. 
+ is_a: Image + attributes: + array: + name: array + range: RGBAImage_Array + RGBAImage_Array: + name: RGBAImage_Array + is_a: Arraylike + attributes: + x: + name: x + range: numeric + required: false + y: + name: y + range: numeric + required: false + r, g, b, a: + name: r, g, b, a + range: numeric + required: false + minimum_cardinality: 4 + maximum_cardinality: 4 + ImageSeries: + name: ImageSeries + description: General image data that is common between acquisition and stimulus + time series. Sometimes the image data is stored in the file in a raw format + while other times it will be stored as a series of external image files in the + host file system. The data field will either be binary data, if the data is + stored in the NWB file, or empty, if the data is stored in an external image + stack. [frame][x][y] or [frame][x][y][z]. + is_a: TimeSeries + attributes: + data: + name: data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. + multivalued: false + range: ImageSeries_data + required: true + dimension: + name: dimension + description: Number of pixels on x, y, (and z) axes. + multivalued: false + range: ImageSeries_dimension + required: false + external_file: + name: external_file + description: Paths to one or more external file(s). The field is only present + if format='external'. This is only relevant if the image series is stored + in the file system as one or more image file(s). This field should NOT be + used if the image is stored in another NWB file and that file is linked + to this file. + multivalued: false + range: ImageSeries_external_file + required: false + format: + name: format + description: Format of image. If this is 'external', then the attribute 'external_file' + contains the path information to the image files. If this is 'raw', then + the raw (single-channel) binary data is stored in the 'data' dataset. If + this attribute is not present, then the default format='raw' case is assumed. + multivalued: false + range: ImageSeries_format + required: false + ImageSeries_data: + name: ImageSeries_data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. + attributes: + array: + name: array + range: ImageSeries_data_Array + ImageSeries_data_Array: + name: ImageSeries_data_Array + is_a: Arraylike + attributes: + frame: + name: frame + range: numeric + required: true + x: + name: x + range: numeric + required: true + y: + name: y + range: numeric + required: true + z: + name: z + range: numeric + required: false + ImageSeries_dimension: + name: ImageSeries_dimension + description: Number of pixels on x, y, (and z) axes. + attributes: + array: + name: array + range: ImageSeries_dimension_Array + ImageSeries_dimension_Array: + name: ImageSeries_dimension_Array + is_a: Arraylike + attributes: + rank: + name: rank + range: int32 + required: true + ImageSeries_external_file: + name: ImageSeries_external_file + description: Paths to one or more external file(s). The field is only present + if format='external'. This is only relevant if the image series is stored in + the file system as one or more image file(s). This field should NOT be used + if the image is stored in another NWB file and that file is linked to this file. + attributes: + starting_frame: + name: starting_frame + description: Each external image may contain one or more consecutive frames + of the full ImageSeries. 
This attribute serves as an index to indicate which
+          frames each file contains, to facilitate random access. The 'starting_frame'
+          attribute, hence, contains a list of frame numbers within the full ImageSeries
+          of the first frame of each file listed in the parent 'external_file' dataset.
+          Zero-based indexing is used (hence, the first element will always be zero).
+          For example, if the 'external_file' dataset has three paths to files and
+          the first file has 5 frames, the second file has 10 frames, and the third
+          file has 20 frames, then this attribute will have values [0, 5, 15]. If
+          there is a single external file that holds all of the frames of the ImageSeries
+          (and so there is a single element in the 'external_file' dataset), then
+          this attribute should have value [0].
+        range: int32
+      array:
+        name: array
+        range: ImageSeries_external_file_Array
+  ImageSeries_external_file_Array:
+    name: ImageSeries_external_file_Array
+    is_a: Arraylike
+    attributes:
+      num_files:
+        name: num_files
+        range: text
+        required: true
+  ImageSeries_format:
+    name: ImageSeries_format
+    description: Format of image. If this is 'external', then the attribute 'external_file'
+      contains the path information to the image files. If this is 'raw', then the
+      raw (single-channel) binary data is stored in the 'data' dataset. If this attribute
+      is not present, then the default format='raw' case is assumed.
+  ImageMaskSeries:
+    name: ImageMaskSeries
+    description: An alpha mask that is applied to a presented visual stimulus. The
+      'data' array contains an array of mask values that are applied to the displayed
+      image. Mask values are stored as RGBA. Mask can vary with time. The timestamps
+      array indicates the starting time of a mask, and that mask pattern continues
+      until it's explicitly changed.
+    is_a: ImageSeries
+  OpticalSeries:
+    name: OpticalSeries
+    description: Image data that is presented or recorded. A stimulus template movie
+      will be stored only as an image. When the image is presented as stimulus, additional
+      data is required, such as field of view (e.g., how much of the visual field
+      the image covers, or what is the area of the target being imaged). If the
+      OpticalSeries represents acquired imaging data, orientation is also important.
+    is_a: ImageSeries
+    attributes:
+      distance:
+        name: distance
+        description: Distance from camera/monitor to target/eye.
+        multivalued: false
+        range: OpticalSeries_distance
+        required: false
+      field_of_view:
+        name: field_of_view
+        description: Width, height and depth of image, or imaged area, in meters.
+        multivalued: false
+        range: OpticalSeries_field_of_view
+        required: false
+      data:
+        name: data
+        description: Images presented to subject, either grayscale or RGB
+        multivalued: false
+        range: OpticalSeries_data
+        required: true
+      orientation:
+        name: orientation
+        description: Description of image relative to some reference frame (e.g.,
+          which way is up). Must also specify frame of reference.
+        multivalued: false
+        range: OpticalSeries_orientation
+        required: false
+  OpticalSeries_distance:
+    name: OpticalSeries_distance
+    description: Distance from camera/monitor to target/eye.
+  OpticalSeries_field_of_view:
+    name: OpticalSeries_field_of_view
+    description: Width, height and depth of image, or imaged area, in meters. 
+ attributes: + array: + name: array + range: OpticalSeries_field_of_view_Array + OpticalSeries_field_of_view_Array: + name: OpticalSeries_field_of_view_Array + is_a: Arraylike + attributes: + width, height: + name: width, height + range: float32 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + width, height, depth: + name: width, height, depth + range: float32 + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + OpticalSeries_data: + name: OpticalSeries_data + description: Images presented to subject, either grayscale or RGB + attributes: + array: + name: array + range: OpticalSeries_data_Array + OpticalSeries_data_Array: + name: OpticalSeries_data_Array + is_a: Arraylike + attributes: + frame: + name: frame + range: numeric + required: true + x: + name: x + range: numeric + required: true + y: + name: y + range: numeric + required: true + r, g, b: + name: r, g, b + range: numeric + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + OpticalSeries_orientation: + name: OpticalSeries_orientation + description: Description of image relative to some reference frame (e.g., which + way is up). Must also specify frame of reference. + IndexSeries: + name: IndexSeries + description: Stores indices to image frames stored in an ImageSeries. The purpose + of the IndexSeries is to allow a static image stack to be stored in an Images + object, and the images in the stack to be referenced out-of-order. This can + be for the display of individual images, or of movie segments (as a movie is + simply a series of images). The data field stores the index of the frame in + the referenced Images object, and the timestamps array indicates when that image + was displayed. + is_a: TimeSeries + attributes: + data: + name: data + description: Index of the image (using zero-indexing) in the linked Images + object. + multivalued: false + range: IndexSeries_data + required: true + IndexSeries_data: + name: IndexSeries_data + description: Index of the image (using zero-indexing) in the linked Images object. + attributes: + conversion: + name: conversion + description: This field is unused by IndexSeries. + range: float32 + resolution: + name: resolution + description: This field is unused by IndexSeries. + range: float32 + offset: + name: offset + description: This field is unused by IndexSeries. + range: float32 + unit: + name: unit + description: This field is unused by IndexSeries and has the value N/A. + range: text + array: + name: array + range: IndexSeries_data_Array + IndexSeries_data_Array: + name: IndexSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: uint32 + required: true diff --git a/nwb_linkml/schema/core.nwb.misc.yaml b/nwb_linkml/schema/core.nwb.misc.yaml new file mode 100644 index 0000000..d71a3d5 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.misc.yaml @@ -0,0 +1,614 @@ +name: core.nwb.misc +id: core.nwb.misc +imports: +- core.nwb.base +- hdmf-common.table +- nwb.language +default_prefix: core.nwb.misc/ +classes: + AbstractFeatureSeries: + name: AbstractFeatureSeries + description: Abstract features, such as quantitative descriptions of sensory stimuli. + The TimeSeries::data field is a 2D array, storing those features (e.g., for + visual grating stimulus this might be orientation, spatial frequency and contrast). 
+ Null stimuli (eg, uniform gray) can be marked as being an independent feature + (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, + or through use of the TimeSeries::control fields. A set of features is considered + to persist until the next set of features is defined. The final set of features + stored should be the null set. This is useful when storing the raw stimulus + is impractical. + is_a: TimeSeries + attributes: + data: + name: data + description: Values of each feature at each time. + multivalued: false + range: AbstractFeatureSeries_data + required: true + feature_units: + name: feature_units + description: Units of each feature. + multivalued: false + range: AbstractFeatureSeries_feature_units + required: false + features: + name: features + description: Description of the features represented in TimeSeries::data. + multivalued: false + range: AbstractFeatureSeries_features + required: true + AbstractFeatureSeries_data: + name: AbstractFeatureSeries_data + description: Values of each feature at each time. + attributes: + unit: + name: unit + description: Since there can be different units for different features, store + the units in 'feature_units'. The default value for this attribute is "see + 'feature_units'". + range: text + array: + name: array + range: AbstractFeatureSeries_data_Array + AbstractFeatureSeries_data_Array: + name: AbstractFeatureSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + num_features: + name: num_features + range: numeric + required: false + AbstractFeatureSeries_feature_units: + name: AbstractFeatureSeries_feature_units + description: Units of each feature. + attributes: + array: + name: array + range: AbstractFeatureSeries_feature_units_Array + AbstractFeatureSeries_feature_units_Array: + name: AbstractFeatureSeries_feature_units_Array + is_a: Arraylike + attributes: + num_features: + name: num_features + range: text + required: true + AbstractFeatureSeries_features: + name: AbstractFeatureSeries_features + description: Description of the features represented in TimeSeries::data. + attributes: + array: + name: array + range: AbstractFeatureSeries_features_Array + AbstractFeatureSeries_features_Array: + name: AbstractFeatureSeries_features_Array + is_a: Arraylike + attributes: + num_features: + name: num_features + range: text + required: true + AnnotationSeries: + name: AnnotationSeries + description: Stores user annotations made during an experiment. The data[] field + stores a text array, and timestamps are stored for each annotation (ie, interval=1). + This is largely an alias to a standard TimeSeries storing a text array but that + is identifiable as storing annotations in a machine-readable way. + is_a: TimeSeries + attributes: + data: + name: data + description: Annotations made during an experiment. + multivalued: false + range: AnnotationSeries_data + required: true + AnnotationSeries_data: + name: AnnotationSeries_data + description: Annotations made during an experiment. + attributes: + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. 
+ range: text + array: + name: array + range: AnnotationSeries_data_Array + AnnotationSeries_data_Array: + name: AnnotationSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: text + required: true + IntervalSeries: + name: IntervalSeries + description: Stores intervals of data. The timestamps field stores the beginning + and end of intervals. The data field stores whether the interval just started + (>0 value) or ended (<0 value). Different interval types can be represented + in the same series by using multiple key values (eg, 1 for feature A, 2 for + feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This + is largely an alias of a standard TimeSeries but that is identifiable as representing + time intervals in a machine-readable way. + is_a: TimeSeries + attributes: + data: + name: data + description: Use values >0 if interval started, <0 if interval ended. + multivalued: false + range: IntervalSeries_data + required: true + IntervalSeries_data: + name: IntervalSeries_data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + range: text + array: + name: array + range: IntervalSeries_data_Array + IntervalSeries_data_Array: + name: IntervalSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: int8 + required: true + DecompositionSeries: + name: DecompositionSeries + description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. + is_a: TimeSeries + attributes: + data: + name: data + description: Data decomposed into frequency bands. + multivalued: false + range: DecompositionSeries_data + required: true + metric: + name: metric + description: The metric used, e.g. phase, amplitude, power. + multivalued: false + range: DecompositionSeries_metric + required: true + source_channels: + name: source_channels + description: DynamicTableRegion pointer to the channels that this decomposition + series was generated from. + multivalued: false + range: DecompositionSeries_source_channels + required: false + bands: + name: bands + description: Table for describing the bands that this series was generated + from. There should be one row in this table for each band. + multivalued: false + range: DecompositionSeries_bands + required: true + DecompositionSeries_data: + name: DecompositionSeries_data + description: Data decomposed into frequency bands. + attributes: + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + array: + name: array + range: DecompositionSeries_data_Array + DecompositionSeries_data_Array: + name: DecompositionSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: false + num_channels: + name: num_channels + range: numeric + required: false + num_bands: + name: num_bands + range: numeric + required: false + DecompositionSeries_metric: + name: DecompositionSeries_metric + description: The metric used, e.g. phase, amplitude, power. 
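+  # DecompositionSeries_source_channels below is a DynamicTableRegion: its values are
+  # row indices into the table of source channels (typically the electrodes table) that
+  # the decomposition was computed from. The bands table that follows has one row per
+  # frequency band; note how its band_limits column encodes the (low, high) pair with
+  # minimum_cardinality/maximum_cardinality = 2 on the 'low, high' slot of the
+  # corresponding _Array class.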
+ DecompositionSeries_source_channels: + name: DecompositionSeries_source_channels + description: DynamicTableRegion pointer to the channels that this decomposition + series was generated from. + is_a: DynamicTableRegion + DecompositionSeries_bands: + name: DecompositionSeries_bands + description: Table for describing the bands that this series was generated from. + There should be one row in this table for each band. + is_a: DynamicTable + attributes: + band_name: + name: band_name + description: Name of the band, e.g. theta. + multivalued: false + range: DecompositionSeries_bands_band_name + required: true + band_limits: + name: band_limits + description: Low and high limit of each band in Hz. If it is a Gaussian filter, + use 2 SD on either side of the center. + multivalued: false + range: DecompositionSeries_bands_band_limits + required: true + band_mean: + name: band_mean + description: The mean Gaussian filters, in Hz. + multivalued: false + range: DecompositionSeries_bands_band_mean + required: true + band_stdev: + name: band_stdev + description: The standard deviation of Gaussian filters, in Hz. + multivalued: false + range: DecompositionSeries_bands_band_stdev + required: true + DecompositionSeries_bands_band_name: + name: DecompositionSeries_bands_band_name + description: Name of the band, e.g. theta. + is_a: VectorData + DecompositionSeries_bands_band_limits: + name: DecompositionSeries_bands_band_limits + description: Low and high limit of each band in Hz. If it is a Gaussian filter, + use 2 SD on either side of the center. + is_a: VectorData + attributes: + array: + name: array + range: DecompositionSeries_bands_band_limits_Array + DecompositionSeries_bands_band_limits_Array: + name: DecompositionSeries_bands_band_limits_Array + is_a: Arraylike + attributes: + num_bands: + name: num_bands + range: float32 + required: false + low, high: + name: low, high + range: float32 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + DecompositionSeries_bands_band_mean: + name: DecompositionSeries_bands_band_mean + description: The mean Gaussian filters, in Hz. + is_a: VectorData + attributes: + array: + name: array + range: DecompositionSeries_bands_band_mean_Array + DecompositionSeries_bands_band_mean_Array: + name: DecompositionSeries_bands_band_mean_Array + is_a: Arraylike + attributes: + num_bands: + name: num_bands + range: float32 + required: true + DecompositionSeries_bands_band_stdev: + name: DecompositionSeries_bands_band_stdev + description: The standard deviation of Gaussian filters, in Hz. + is_a: VectorData + attributes: + array: + name: array + range: DecompositionSeries_bands_band_stdev_Array + DecompositionSeries_bands_band_stdev_Array: + name: DecompositionSeries_bands_band_stdev_Array + is_a: Arraylike + attributes: + num_bands: + name: num_bands + range: float32 + required: true + Units: + name: Units + description: Data about spiking units. Event times of observed units (e.g. cell, + synapse, etc.) should be concatenated and stored in spike_times. + is_a: DynamicTable + attributes: + spike_times_index: + name: spike_times_index + description: Index into the spike_times dataset. + multivalued: false + range: Units_spike_times_index + required: false + spike_times: + name: spike_times + description: Spike times for each unit in seconds. + multivalued: false + range: Units_spike_times + required: false + obs_intervals_index: + name: obs_intervals_index + description: Index into the obs_intervals dataset. 
+ multivalued: false + range: Units_obs_intervals_index + required: false + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. + multivalued: false + range: Units_obs_intervals + required: false + electrodes_index: + name: electrodes_index + description: Index into electrodes. + multivalued: false + range: Units_electrodes_index + required: false + electrodes: + name: electrodes + description: Electrode that each spike unit came from, specified using a DynamicTableRegion. + multivalued: false + range: Units_electrodes + required: false + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. + multivalued: false + range: Units_electrode_group + required: false + waveform_mean: + name: waveform_mean + description: Spike waveform mean for each spike unit. + multivalued: false + range: Units_waveform_mean + required: false + waveform_sd: + name: waveform_sd + description: Spike waveform standard deviation for each spike unit. + multivalued: false + range: Units_waveform_sd + required: false + waveforms: + name: waveforms + description: Individual waveforms for each spike on each electrode. This is + a doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each + waveform was recorded from a different electrode. The 'waveforms_index_index' + column indexes the 'waveforms_index' column to indicate which spike events + belong to a given unit. For example, if the 'waveforms_index_index' column + has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' + column correspond to the 2 spike events of the first unit, the next 3 elements + of the 'waveforms_index' column correspond to the 3 spike events of the + second unit, and the next 1 element of the 'waveforms_index' column corresponds + to the 1 spike event of the third unit. If the 'waveforms_index' column + has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' + column contain the 3 spike waveforms that were recorded from 3 different + electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N + is the number of spike events. The number of electrodes for each spike event + should be the same within a given unit. The 'electrodes' column should be + used to indicate which electrodes are associated with each unit, and the + order of the waveforms within a given unit x spike event should be in the + same order as the electrodes referenced in the 'electrodes' column of this + table. The number of samples for each waveform must be the same. + multivalued: false + range: Units_waveforms + required: false + waveforms_index: + name: waveforms_index + description: Index into the waveforms dataset. One value for every spike event. + See 'waveforms' for more detail. + multivalued: false + range: Units_waveforms_index + required: false + waveforms_index_index: + name: waveforms_index_index + description: Index into the waveforms_index dataset. One value for every unit + (row in the table). See 'waveforms' for more detail. 
+ multivalued: false + range: Units_waveforms_index_index + required: false + Units_spike_times_index: + name: Units_spike_times_index + description: Index into the spike_times dataset. + is_a: VectorIndex + Units_spike_times: + name: Units_spike_times + description: Spike times for each unit in seconds. + is_a: VectorData + attributes: + resolution: + name: resolution + description: The smallest possible difference between two spike times. Usually + 1 divided by the acquisition sampling rate from which spike times were extracted, + but could be larger if the acquisition time series was downsampled or smaller + if the acquisition time series was smoothed/interpolated and it is possible + for the spike time to be between samples. + range: float64 + Units_obs_intervals_index: + name: Units_obs_intervals_index + description: Index into the obs_intervals dataset. + is_a: VectorIndex + Units_obs_intervals: + name: Units_obs_intervals + description: Observation intervals for each unit. + is_a: VectorData + attributes: + array: + name: array + range: Units_obs_intervals_Array + Units_obs_intervals_Array: + name: Units_obs_intervals_Array + is_a: Arraylike + attributes: + num_intervals: + name: num_intervals + range: float64 + required: false + start|end: + name: start|end + range: float64 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + Units_electrodes_index: + name: Units_electrodes_index + description: Index into electrodes. + is_a: VectorIndex + Units_electrodes: + name: Units_electrodes + description: Electrode that each spike unit came from, specified using a DynamicTableRegion. + is_a: DynamicTableRegion + Units_electrode_group: + name: Units_electrode_group + description: Electrode group that each spike unit came from. + is_a: VectorData + Units_waveform_mean: + name: Units_waveform_mean + description: Spike waveform mean for each spike unit. + is_a: VectorData + attributes: + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + range: text + array: + name: array + range: Units_waveform_mean_Array + Units_waveform_mean_Array: + name: Units_waveform_mean_Array + is_a: Arraylike + attributes: + num_units: + name: num_units + range: float32 + required: true + num_samples: + name: num_samples + range: float32 + required: true + num_electrodes: + name: num_electrodes + range: float32 + required: false + Units_waveform_sd: + name: Units_waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + range: text + array: + name: array + range: Units_waveform_sd_Array + Units_waveform_sd_Array: + name: Units_waveform_sd_Array + is_a: Arraylike + attributes: + num_units: + name: num_units + range: float32 + required: true + num_samples: + name: num_samples + range: float32 + required: true + num_electrodes: + name: num_electrodes + range: float32 + required: false + Units_waveforms: + name: Units_waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. 
The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. + is_a: VectorData + attributes: + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + range: text + array: + name: array + range: Units_waveforms_Array + Units_waveforms_Array: + name: Units_waveforms_Array + is_a: Arraylike + attributes: + num_waveforms: + name: num_waveforms + range: numeric + required: false + num_samples: + name: num_samples + range: numeric + required: false + Units_waveforms_index: + name: Units_waveforms_index + description: Index into the waveforms dataset. One value for every spike event. + See 'waveforms' for more detail. + is_a: VectorIndex + Units_waveforms_index_index: + name: Units_waveforms_index_index + description: Index into the waveforms_index dataset. One value for every unit + (row in the table). See 'waveforms' for more detail. + is_a: VectorIndex diff --git a/nwb_linkml/schema/core.nwb.ogen.yaml b/nwb_linkml/schema/core.nwb.ogen.yaml new file mode 100644 index 0000000..4d3a44d --- /dev/null +++ b/nwb_linkml/schema/core.nwb.ogen.yaml @@ -0,0 +1,73 @@ +name: core.nwb.ogen +id: core.nwb.ogen +imports: +- core.nwb.base +- nwb.language +default_prefix: core.nwb.ogen/ +classes: + OptogeneticSeries: + name: OptogeneticSeries + description: An optogenetic stimulus. + is_a: TimeSeries + attributes: + data: + name: data + description: Applied power for optogenetic stimulus, in watts. + multivalued: false + range: OptogeneticSeries_data + required: true + OptogeneticSeries_data: + name: OptogeneticSeries_data + description: Applied power for optogenetic stimulus, in watts. + attributes: + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. 
+ range: text + array: + name: array + range: OptogeneticSeries_data_Array + OptogeneticSeries_data_Array: + name: OptogeneticSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + OptogeneticStimulusSite: + name: OptogeneticStimulusSite + description: A site of optogenetic stimulation. + is_a: NWBContainer + attributes: + description: + name: description + description: Description of stimulation site. + multivalued: false + range: OptogeneticStimulusSite_description + required: true + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + multivalued: false + range: OptogeneticStimulusSite_excitation_lambda + required: true + location: + name: location + description: Location of the stimulation site. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + multivalued: false + range: OptogeneticStimulusSite_location + required: true + OptogeneticStimulusSite_description: + name: OptogeneticStimulusSite_description + description: Description of stimulation site. + OptogeneticStimulusSite_excitation_lambda: + name: OptogeneticStimulusSite_excitation_lambda + description: Excitation wavelength, in nm. + OptogeneticStimulusSite_location: + name: OptogeneticStimulusSite_location + description: Location of the stimulation site. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard + atlas names for anatomical regions when possible. diff --git a/nwb_linkml/schema/core.nwb.ophys.yaml b/nwb_linkml/schema/core.nwb.ophys.yaml new file mode 100644 index 0000000..e1ad2bd --- /dev/null +++ b/nwb_linkml/schema/core.nwb.ophys.yaml @@ -0,0 +1,571 @@ +name: core.nwb.ophys +id: core.nwb.ophys +imports: +- core.nwb.image +- core.nwb.base +- hdmf-common.table +- nwb.language +default_prefix: core.nwb.ophys/ +classes: + OnePhotonSeries: + name: OnePhotonSeries + description: Image stack recorded over time from 1-photon microscope. + is_a: ImageSeries + attributes: + pmt_gain: + name: pmt_gain + description: Photomultiplier gain. + range: float32 + scan_line_rate: + name: scan_line_rate + description: Lines imaged per second. This is also stored in /general/optophysiology + but is kept here as it is useful information for analysis, and so good to + be stored w/ the actual data. + range: float32 + exposure_time: + name: exposure_time + description: Exposure time of the sample; often the inverse of the frequency. + range: float32 + binning: + name: binning + description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + range: uint8 + power: + name: power + description: Power of the excitation in mW, if known. + range: float32 + intensity: + name: intensity + description: Intensity of the excitation in mW/mm^2, if known. + range: float32 + TwoPhotonSeries: + name: TwoPhotonSeries + description: Image stack recorded over time from 2-photon microscope. + is_a: ImageSeries + attributes: + pmt_gain: + name: pmt_gain + description: Photomultiplier gain. + range: float32 + scan_line_rate: + name: scan_line_rate + description: Lines imaged per second. This is also stored in /general/optophysiology + but is kept here as it is useful information for analysis, and so good to + be stored w/ the actual data. 
+ range: float32 + field_of_view: + name: field_of_view + description: Width, height and depth of image, or imaged area, in meters. + multivalued: false + range: TwoPhotonSeries_field_of_view + required: false + TwoPhotonSeries_field_of_view: + name: TwoPhotonSeries_field_of_view + description: Width, height and depth of image, or imaged area, in meters. + attributes: + array: + name: array + range: TwoPhotonSeries_field_of_view_Array + TwoPhotonSeries_field_of_view_Array: + name: TwoPhotonSeries_field_of_view_Array + is_a: Arraylike + attributes: + width|height: + name: width|height + range: float32 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + width|height|depth: + name: width|height|depth + range: float32 + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + RoiResponseSeries: + name: RoiResponseSeries + description: ROI responses over an imaging plane. The first dimension represents + time. The second dimension, if present, represents ROIs. + is_a: TimeSeries + attributes: + data: + name: data + description: Signals from ROIs. + multivalued: false + range: RoiResponseSeries_data + required: true + rois: + name: rois + description: DynamicTableRegion referencing into an ROITable containing information + on the ROIs stored in this timeseries. + multivalued: false + range: RoiResponseSeries_rois + required: true + RoiResponseSeries_data: + name: RoiResponseSeries_data + description: Signals from ROIs. + attributes: + array: + name: array + range: RoiResponseSeries_data_Array + RoiResponseSeries_data_Array: + name: RoiResponseSeries_data_Array + is_a: Arraylike + attributes: + num_times: + name: num_times + range: numeric + required: true + num_ROIs: + name: num_ROIs + range: numeric + required: false + RoiResponseSeries_rois: + name: RoiResponseSeries_rois + description: DynamicTableRegion referencing into an ROITable containing information + on the ROIs stored in this timeseries. + is_a: DynamicTableRegion + DfOverF: + name: DfOverF + description: dF/F information about a region of interest (ROI). Storage hierarchy + of dF/F should be the same as for segmentation (i.e., same names for ROIs and + for image planes). + is_a: NWBDataInterface + attributes: + RoiResponseSeries: + name: RoiResponseSeries + description: RoiResponseSeries object(s) containing dF/F for a ROI. + multivalued: true + range: RoiResponseSeries + required: true + Fluorescence: + name: Fluorescence + description: Fluorescence information about a region of interest (ROI). Storage + hierarchy of fluorescence should be the same as for segmentation (ie, same names + for ROIs and for image planes). + is_a: NWBDataInterface + attributes: + RoiResponseSeries: + name: RoiResponseSeries + description: RoiResponseSeries object(s) containing fluorescence data for + a ROI. + multivalued: true + range: RoiResponseSeries + required: true + ImageSegmentation: + name: ImageSegmentation + description: Stores pixels in an image that represent different regions of interest + (ROIs) or masks. All segmentation for a given imaging plane is stored together, + with storage for multiple imaging planes (masks) supported. Each ROI is stored + in its own subgroup, with the ROI group containing both a 2D mask and a list + of pixels that make up this mask. Segments can also be used for masking neuropil. + If segmentation is allowed to change with time, a new imaging plane (or module) + is required and ROI names should remain consistent between them. 
+ is_a: NWBDataInterface + attributes: + PlaneSegmentation: + name: PlaneSegmentation + description: Results from image segmentation of a specific imaging plane. + multivalued: true + range: PlaneSegmentation + required: true + PlaneSegmentation: + name: PlaneSegmentation + description: Results from image segmentation of a specific imaging plane. + is_a: DynamicTable + attributes: + image_mask: + name: image_mask + description: ROI masks for each ROI. Each image mask is the size of the original + imaging plane (or volume) and members of the ROI are finite non-zero. + multivalued: false + range: PlaneSegmentation_image_mask + required: false + pixel_mask_index: + name: pixel_mask_index + description: Index into pixel_mask. + multivalued: false + range: PlaneSegmentation_pixel_mask_index + required: false + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + multivalued: false + range: PlaneSegmentation_pixel_mask + required: false + voxel_mask_index: + name: voxel_mask_index + description: Index into voxel_mask. + multivalued: false + range: PlaneSegmentation_voxel_mask_index + required: false + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. Voxel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + multivalued: false + range: PlaneSegmentation_voxel_mask + required: false + reference_images: + name: reference_images + description: Image stacks that the segmentation masks apply to. + multivalued: false + range: PlaneSegmentation_reference_images + required: true + PlaneSegmentation_image_mask: + name: PlaneSegmentation_image_mask + description: ROI masks for each ROI. Each image mask is the size of the original + imaging plane (or volume) and members of the ROI are finite non-zero. + is_a: VectorData + attributes: + array: + name: array + range: PlaneSegmentation_image_mask_Array + PlaneSegmentation_image_mask_Array: + name: PlaneSegmentation_image_mask_Array + is_a: Arraylike + attributes: + num_roi: + name: num_roi + range: AnyType + required: true + num_x: + name: num_x + range: AnyType + required: true + num_y: + name: num_y + range: AnyType + required: true + num_z: + name: num_z + range: AnyType + required: false + PlaneSegmentation_pixel_mask_index: + name: PlaneSegmentation_pixel_mask_index + description: Index into pixel_mask. + is_a: VectorIndex + PlaneSegmentation_pixel_mask: + name: PlaneSegmentation_pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for the + ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + is_a: VectorData + PlaneSegmentation_voxel_mask_index: + name: PlaneSegmentation_voxel_mask_index + description: Index into voxel_mask. + is_a: VectorIndex + PlaneSegmentation_voxel_mask: + name: PlaneSegmentation_voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for the + ROI. Voxel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + is_a: VectorData + PlaneSegmentation_reference_images: + name: PlaneSegmentation_reference_images + description: Image stacks that the segmentation masks apply to. + attributes: + ImageSeries: + name: ImageSeries + description: One or more image stacks that the masks apply to (can be one-element + stack). 
+ multivalued: true + range: ImageSeries + required: false + ImagingPlane: + name: ImagingPlane + description: An imaging plane and its metadata. + is_a: NWBContainer + attributes: + description: + name: description + description: Description of the imaging plane. + multivalued: false + range: ImagingPlane_description + required: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + multivalued: false + range: ImagingPlane_excitation_lambda + required: true + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + multivalued: false + range: ImagingPlane_imaging_rate + required: false + indicator: + name: indicator + description: Calcium indicator. + multivalued: false + range: ImagingPlane_indicator + required: true + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + multivalued: false + range: ImagingPlane_location + required: true + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + multivalued: false + range: ImagingPlane_manifold + required: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + multivalued: false + range: ImagingPlane_origin_coords + required: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + multivalued: false + range: ImagingPlane_grid_spacing + required: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." 
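The reference_frame description above gives a concrete origin/grid-spacing example. A small sketch of the usual reading of those two datasets (physical location = origin + index × spacing; that reading is an assumption here, with the values taken from the description):

```python
# Sketch of recovering a pixel's physical location from origin_coords and
# grid_spacing, using the example values in the reference_frame description.
# Assumes physical = origin + index * spacing along each axis.
origin_coords = (-1.2, -0.6, -2.0)   # mm relative to bregma, at index (0, 0, 0)
grid_spacing = (0.2, 0.2, 0.5)       # mm between pixels in x, y, z

def physical_location(index):
    """Map an (x, y, z) pixel index to physical coordinates."""
    return tuple(o + i * s for o, i, s in zip(origin_coords, index, grid_spacing))

print(physical_location((0, 0, 0)))  # (-1.2, -0.6, -2.0)
print(physical_location((9, 9, 1)))  # ~(0.6, 1.2, -1.5): far corner of a 10 x 10 x 2 plane
```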
+ multivalued: false + range: ImagingPlane_reference_frame + required: false + OpticalChannel: + name: OpticalChannel + description: An optical channel used to record from an imaging plane. + multivalued: true + range: OpticalChannel + required: true + ImagingPlane_description: + name: ImagingPlane_description + description: Description of the imaging plane. + ImagingPlane_excitation_lambda: + name: ImagingPlane_excitation_lambda + description: Excitation wavelength, in nm. + ImagingPlane_imaging_rate: + name: ImagingPlane_imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + ImagingPlane_indicator: + name: ImagingPlane_indicator + description: Calcium indicator. + ImagingPlane_location: + name: ImagingPlane_location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard + atlas names for anatomical regions when possible. + ImagingPlane_manifold: + name: ImagingPlane_manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + array: + name: array + range: ImagingPlane_manifold_Array + ImagingPlane_manifold_Array: + name: ImagingPlane_manifold_Array + is_a: Arraylike + attributes: + height: + name: height + range: float32 + required: true + width: + name: width + range: float32 + required: true + x, y, z: + name: x, y, z + range: float32 + required: true + minimum_cardinality: 3 + maximum_cardinality: 3 + depth: + name: depth + range: float32 + required: false + ImagingPlane_origin_coords: + name: ImagingPlane_origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + array: + name: array + range: ImagingPlane_origin_coords_Array + ImagingPlane_origin_coords_Array: + name: ImagingPlane_origin_coords_Array + is_a: Arraylike + attributes: + x, y: + name: x, y + range: float32 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + x, y, z: + name: x, y, z + range: float32 + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + ImagingPlane_grid_spacing: + name: ImagingPlane_grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. 
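The 'conversion' attribute on ImagingPlane_manifold includes a worked pixels-to-meters example; spelled out as arithmetic (illustrative only):

```python
# The manifold 'conversion' example above: raw pixel coordinates from
# -500..499 span a 2 m x 2 m field, so one pixel step is 2/1000 m.
pixel_extent = 499 - (-500) + 1      # 1000 pixel positions
physical_extent_m = 2.0              # 2 m across the same axis
conversion = physical_extent_m / pixel_extent
print(conversion)                    # 0.002, i.e. 2/1000

raw_pixel = 250                      # a raw value as stored in the dataset
print(raw_pixel * conversion)        # 0.5, i.e. 0.5 m in the specified 'unit'
```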
See also reference_frame + to interpret the grid. + attributes: + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. + range: text + array: + name: array + range: ImagingPlane_grid_spacing_Array + ImagingPlane_grid_spacing_Array: + name: ImagingPlane_grid_spacing_Array + is_a: Arraylike + attributes: + x, y: + name: x, y + range: float32 + required: false + minimum_cardinality: 2 + maximum_cardinality: 2 + x, y, z: + name: x, y, z + range: float32 + required: false + minimum_cardinality: 3 + maximum_cardinality: 3 + ImagingPlane_reference_frame: + name: ImagingPlane_reference_frame + description: Describes reference frame of origin_coords and grid_spacing. For + example, this can be a text description of the anatomical location and orientation + of the grid defined by origin_coords and grid_spacing or the vectors needed + to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). + This field is necessary to interpret origin_coords and grid_spacing. If origin_coords + and grid_spacing are not present, then this field is not required. For example, + if the microscope takes 10 x 10 x 2 images, where the first value of the data + matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, + the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and + larger numbers in x means more anterior, larger numbers in y means more rightward, + and larger numbers in z means more ventral, then enter the following -- origin_coords + = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin + coordinates are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + OpticalChannel: + name: OpticalChannel + description: An optical channel used to record from an imaging plane. + is_a: NWBContainer + attributes: + description: + name: description + description: Description or other notes about the channel. + multivalued: false + range: OpticalChannel_description + required: true + emission_lambda: + name: emission_lambda + description: Emission wavelength for channel, in nm. + multivalued: false + range: OpticalChannel_emission_lambda + required: true + OpticalChannel_description: + name: OpticalChannel_description + description: Description or other notes about the channel. + OpticalChannel_emission_lambda: + name: OpticalChannel_emission_lambda + description: Emission wavelength for channel, in nm. + MotionCorrection: + name: MotionCorrection + description: 'An image stack where all frames are shifted (registered) to a common + coordinate system, to account for movement and drift between frames. Note: each + frame at each point in time is assumed to be 2-D (has only x & y dimensions).' + is_a: NWBDataInterface + attributes: + CorrectedImageStack: + name: CorrectedImageStack + description: Reuslts from motion correction of an image stack. + multivalued: true + range: CorrectedImageStack + required: true + CorrectedImageStack: + name: CorrectedImageStack + description: Reuslts from motion correction of an image stack. + is_a: NWBDataInterface + attributes: + corrected: + name: corrected + description: Image stack with frames shifted to the common coordinates. 
+ multivalued: false + range: CorrectedImageStack_corrected + required: true + xy_translation: + name: xy_translation + description: Stores the x,y delta necessary to align each frame to the common + coordinates, for example, to align each frame to a reference image. + multivalued: false + range: CorrectedImageStack_xy_translation + required: true + CorrectedImageStack_corrected: + name: CorrectedImageStack_corrected + description: Image stack with frames shifted to the common coordinates. + is_a: ImageSeries + CorrectedImageStack_xy_translation: + name: CorrectedImageStack_xy_translation + description: Stores the x,y delta necessary to align each frame to the common + coordinates, for example, to align each frame to a reference image. + is_a: TimeSeries diff --git a/nwb_linkml/schema/core.nwb.retinotopy.yaml b/nwb_linkml/schema/core.nwb.retinotopy.yaml new file mode 100644 index 0000000..3c7afa0 --- /dev/null +++ b/nwb_linkml/schema/core.nwb.retinotopy.yaml @@ -0,0 +1,333 @@ +name: core.nwb.retinotopy +id: core.nwb.retinotopy +imports: +- core.nwb.base +- nwb.language +default_prefix: core.nwb.retinotopy/ +classes: + ImagingRetinotopy: + name: ImagingRetinotopy + description: 'Intrinsic signal optical imaging or widefield imaging for measuring + retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of + responses to specific stimuli and a combined polarity map from which to identify + visual areas. This group does not store the raw responses imaged during retinotopic + mapping or the stimuli presented, but rather the resulting phase and power maps + after applying a Fourier transform on the averaged responses. Note: for data + consistency, all images and arrays are stored in the format [row][column] and + [row, col], which equates to [y][x]. Field of view and dimension arrays may + appear backward (i.e., y before x).' + is_a: NWBDataInterface + attributes: + axis_1_phase_map: + name: axis_1_phase_map + description: Phase response to stimulus on the first measured axis. + multivalued: false + range: ImagingRetinotopy_axis_1_phase_map + required: true + axis_1_power_map: + name: axis_1_power_map + description: Power response on the first measured axis. Response is scaled + so 0.0 is no power in the response and 1.0 is maximum relative power. + multivalued: false + range: ImagingRetinotopy_axis_1_power_map + required: false + axis_2_phase_map: + name: axis_2_phase_map + description: Phase response to stimulus on the second measured axis. + multivalued: false + range: ImagingRetinotopy_axis_2_phase_map + required: true + axis_2_power_map: + name: axis_2_power_map + description: Power response on the second measured axis. Response is scaled + so 0.0 is no power in the response and 1.0 is maximum relative power. + multivalued: false + range: ImagingRetinotopy_axis_2_power_map + required: false + axis_descriptions: + name: axis_descriptions + description: Two-element array describing the contents of the two response + axis fields. Description should be something like ['altitude', 'azimuth'] + or '['radius', 'theta']. + multivalued: false + range: ImagingRetinotopy_axis_descriptions + required: true + focal_depth_image: + name: focal_depth_image + description: 'Gray-scale image taken with same settings/parameters (e.g., + focal depth, wavelength) as data collection. Array format: [rows][columns].' 
+ multivalued: false + range: ImagingRetinotopy_focal_depth_image + required: false + sign_map: + name: sign_map + description: Sine of the angle between the direction of the gradient in axis_1 + and axis_2. + multivalued: false + range: ImagingRetinotopy_sign_map + required: false + vasculature_image: + name: vasculature_image + description: 'Gray-scale anatomical image of cortical surface. Array structure: + [rows][columns]' + multivalued: false + range: ImagingRetinotopy_vasculature_image + required: true + ImagingRetinotopy_axis_1_phase_map: + name: ImagingRetinotopy_axis_1_phase_map + description: Phase response to stimulus on the first measured axis. + attributes: + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + range: ImagingRetinotopy_axis_1_phase_map_Array + ImagingRetinotopy_axis_1_phase_map_Array: + name: ImagingRetinotopy_axis_1_phase_map_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: float32 + required: false + num_cols: + name: num_cols + range: float32 + required: false + ImagingRetinotopy_axis_1_power_map: + name: ImagingRetinotopy_axis_1_power_map + description: Power response on the first measured axis. Response is scaled so + 0.0 is no power in the response and 1.0 is maximum relative power. + attributes: + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + range: ImagingRetinotopy_axis_1_power_map_Array + ImagingRetinotopy_axis_1_power_map_Array: + name: ImagingRetinotopy_axis_1_power_map_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: float32 + required: false + num_cols: + name: num_cols + range: float32 + required: false + ImagingRetinotopy_axis_2_phase_map: + name: ImagingRetinotopy_axis_2_phase_map + description: Phase response to stimulus on the second measured axis. + attributes: + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + range: ImagingRetinotopy_axis_2_phase_map_Array + ImagingRetinotopy_axis_2_phase_map_Array: + name: ImagingRetinotopy_axis_2_phase_map_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: float32 + required: false + num_cols: + name: num_cols + range: float32 + required: false + ImagingRetinotopy_axis_2_power_map: + name: ImagingRetinotopy_axis_2_power_map + description: Power response on the second measured axis. Response is scaled so + 0.0 is no power in the response and 1.0 is maximum relative power. + attributes: + dimension: + name: dimension + description: 'Number of rows and columns in the image. 
NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + range: ImagingRetinotopy_axis_2_power_map_Array + ImagingRetinotopy_axis_2_power_map_Array: + name: ImagingRetinotopy_axis_2_power_map_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: float32 + required: false + num_cols: + name: num_cols + range: float32 + required: false + ImagingRetinotopy_axis_descriptions: + name: ImagingRetinotopy_axis_descriptions + description: Two-element array describing the contents of the two response axis + fields. Description should be something like ['altitude', 'azimuth'] or '['radius', + 'theta']. + attributes: + array: + name: array + range: ImagingRetinotopy_axis_descriptions_Array + ImagingRetinotopy_axis_descriptions_Array: + name: ImagingRetinotopy_axis_descriptions_Array + is_a: Arraylike + attributes: + axis_1, axis_2: + name: axis_1, axis_2 + range: text + required: true + minimum_cardinality: 2 + maximum_cardinality: 2 + ImagingRetinotopy_focal_depth_image: + name: ImagingRetinotopy_focal_depth_image + description: 'Gray-scale image taken with same settings/parameters (e.g., focal + depth, wavelength) as data collection. Array format: [rows][columns].' + attributes: + bits_per_pixel: + name: bits_per_pixel + description: Number of bits used to represent each value. This is necessary + to determine maximum (white) pixel value. + range: int32 + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + focal_depth: + name: focal_depth + description: Focal depth offset, in meters. + range: float32 + format: + name: format + description: Format of image. Right now only 'raw' is supported. + range: text + array: + name: array + range: ImagingRetinotopy_focal_depth_image_Array + ImagingRetinotopy_focal_depth_image_Array: + name: ImagingRetinotopy_focal_depth_image_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: uint16 + required: false + num_cols: + name: num_cols + range: uint16 + required: false + ImagingRetinotopy_sign_map: + name: ImagingRetinotopy_sign_map + description: Sine of the angle between the direction of the gradient in axis_1 + and axis_2. + attributes: + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + array: + name: array + range: ImagingRetinotopy_sign_map_Array + ImagingRetinotopy_sign_map_Array: + name: ImagingRetinotopy_sign_map_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: float32 + required: false + num_cols: + name: num_cols + range: float32 + required: false + ImagingRetinotopy_vasculature_image: + name: ImagingRetinotopy_vasculature_image + description: 'Gray-scale anatomical image of cortical surface. Array structure: + [rows][columns]' + attributes: + bits_per_pixel: + name: bits_per_pixel + description: Number of bits used to represent each value. 
This is necessary + to determine maximum (white) pixel value + range: int32 + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + format: + name: format + description: Format of image. Right now only 'raw' is supported. + range: text + array: + name: array + range: ImagingRetinotopy_vasculature_image_Array + ImagingRetinotopy_vasculature_image_Array: + name: ImagingRetinotopy_vasculature_image_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: uint16 + required: false + num_cols: + name: num_cols + range: uint16 + required: false diff --git a/nwb_linkml/schema/core.yaml b/nwb_linkml/schema/core.yaml new file mode 100644 index 0000000..c463cc5 --- /dev/null +++ b/nwb_linkml/schema/core.yaml @@ -0,0 +1,18 @@ +name: core +description: NWB namespace +id: core +version: 2.6.0-alpha +imports: +- core.nwb.base +- core.nwb.device +- core.nwb.epoch +- core.nwb.image +- core.nwb.file +- core.nwb.misc +- core.nwb.behavior +- core.nwb.ecephys +- core.nwb.icephys +- core.nwb.ogen +- core.nwb.ophys +- core.nwb.retinotopy +default_prefix: core/ diff --git a/nwb_linkml/schema/hdmf-common.base.yaml b/nwb_linkml/schema/hdmf-common.base.yaml new file mode 100644 index 0000000..04204f0 --- /dev/null +++ b/nwb_linkml/schema/hdmf-common.base.yaml @@ -0,0 +1,30 @@ +name: hdmf-common.base +id: hdmf-common.base +imports: +- nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. + is_a: Container + attributes: + Data: + name: Data + description: Data objects held within this SimpleMultiContainer. + multivalued: true + range: Data + required: false + Container: + name: Container + description: Container objects held within this SimpleMultiContainer. + multivalued: true + range: Container + required: false diff --git a/nwb_linkml/schema/hdmf-common.sparse.yaml b/nwb_linkml/schema/hdmf-common.sparse.yaml new file mode 100644 index 0000000..d2da441 --- /dev/null +++ b/nwb_linkml/schema/hdmf-common.sparse.yaml @@ -0,0 +1,82 @@ +name: hdmf-common.sparse +id: hdmf-common.sparse +imports: +- hdmf-common.base +- nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. + multivalued: false + range: CSRMatrix_indices + required: true + indptr: + name: indptr + description: The row index pointer. + multivalued: false + range: CSRMatrix_indptr + required: true + data: + name: data + description: The non-zero values in the matrix. 
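The CSRMatrix description above defines the standard CSR layout in terms of data, indices and indptr. A minimal plain-Python round trip of that rule (illustrative values, not tied to any generated class):

```python
# CSR layout sketch matching the CSRMatrix description: for row i, the
# column indices are indices[indptr[i]:indptr[i+1]] and the values are
# data[indptr[i]:indptr[i+1]].
shape = (3, 4)
data = [10, 20, 30, 40]        # non-zero values
indices = [0, 3, 1, 2]         # column index of each value
indptr = [0, 2, 3, 4]          # row i occupies data[indptr[i]:indptr[i+1]]

def to_dense(shape, data, indices, indptr):
    rows, cols = shape
    dense = [[0] * cols for _ in range(rows)]
    for i in range(rows):
        for j, v in zip(indices[indptr[i]:indptr[i + 1]],
                        data[indptr[i]:indptr[i + 1]]):
            dense[i][j] = v
    return dense

print(to_dense(shape, data, indices, indptr))
# [[10, 0, 0, 20], [0, 30, 0, 0], [0, 0, 40, 0]]
```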
+ multivalued: false + range: CSRMatrix_data + required: true + CSRMatrix_indices: + name: CSRMatrix_indices + description: The column indices. + attributes: + array: + name: array + range: CSRMatrix_indices_Array + CSRMatrix_indices_Array: + name: CSRMatrix_indices_Array + is_a: Arraylike + attributes: + number of non-zero values: + name: number of non-zero values + range: uint + required: true + CSRMatrix_indptr: + name: CSRMatrix_indptr + description: The row index pointer. + attributes: + array: + name: array + range: CSRMatrix_indptr_Array + CSRMatrix_indptr_Array: + name: CSRMatrix_indptr_Array + is_a: Arraylike + attributes: + number of rows in the matrix + 1: + name: number of rows in the matrix + 1 + range: uint + required: true + CSRMatrix_data: + name: CSRMatrix_data + description: The non-zero values in the matrix. + attributes: + array: + name: array + range: CSRMatrix_data_Array + CSRMatrix_data_Array: + name: CSRMatrix_data_Array + is_a: Arraylike + attributes: + number of non-zero values: + name: number of non-zero values + range: AnyType + required: true diff --git a/nwb_linkml/schema/hdmf-common.table.yaml b/nwb_linkml/schema/hdmf-common.table.yaml new file mode 100644 index 0000000..ce283e6 --- /dev/null +++ b/nwb_linkml/schema/hdmf-common.table.yaml @@ -0,0 +1,209 @@ +name: hdmf-common.table +id: hdmf-common.table +imports: +- hdmf-common.base +- nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: VectorData_Array + VectorData_Array: + name: VectorData_Array + is_a: Arraylike + attributes: + dim0: + name: dim0 + range: AnyType + required: true + dim1: + name: dim1 + range: AnyType + required: false + dim2: + name: dim2 + range: AnyType + required: false + dim3: + name: dim3 + range: AnyType + required: false + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + array: + name: array + range: VectorIndex_Array + VectorIndex_Array: + name: VectorIndex_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: uint8 + required: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. 
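The VectorData/VectorIndex descriptions above state the ragged-array slicing rule directly. A minimal sketch of that rule in plain Python (illustrative data):

```python
# Ragged column sketch: a VectorIndex holds cumulative end-indices into its
# target VectorData, so row i of the table is
# VectorData[VectorIndex[i-1]:VectorIndex[i]], with the first row starting at 0.
vector_data = ["a", "b", "c", "d", "e", "f"]
vector_index = [2, 3, 6]   # row 0 -> [0:2], row 1 -> [2:3], row 2 -> [3:6]

def row(i):
    start = vector_index[i - 1] if i > 0 else 0
    return vector_data[start:vector_index[i]]

print([row(i) for i in range(len(vector_index))])
# [['a', 'b'], ['c'], ['d', 'e', 'f']]
```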
+ is_a: Data + attributes: + array: + name: array + range: ElementIdentifiers_Array + ElementIdentifiers_Array: + name: ElementIdentifiers_Array + is_a: Arraylike + attributes: + num_elements: + name: num_elements + range: int + required: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + array: + name: array + range: DynamicTableRegion_Array + DynamicTableRegion_Array: + name: DynamicTableRegion_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: int + required: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + multivalued: false + range: DynamicTable_id + required: true + VectorData: + name: VectorData + description: Vector columns, including index columns, of this dynamic table. 
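The DynamicTable and DynamicTableRegion descriptions above contrast column-centric ("struct of arrays") storage with 0-indexed row references into another table. A small sketch with hypothetical table and column names:

```python
# A DynamicTable is column-centric ("struct of arrays"): aligned columns that
# share an id column. A DynamicTableRegion is just a column of row indices
# into another table. All names below are hypothetical.
electrodes = {
    "id": [0, 1, 2],
    "location": ["CA1", "CA1", "CA3"],
}

# DynamicTableRegion-style column on a units table: each unit's row in the
# electrodes table, referenced by 0-indexed row number.
units_electrodes = [2, 0, 0]

# Resolve the referenced rows column-centrically.
print([electrodes["location"][row] for row in units_electrodes])
# ['CA3', 'CA1', 'CA1']
```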
+ multivalued: true + range: VectorData + required: false + DynamicTable_id: + name: DynamicTable_id + description: Array of unique identifiers for the rows of this dynamic table. + is_a: ElementIdentifiers + attributes: + array: + name: array + range: DynamicTable_id_Array + DynamicTable_id_Array: + name: DynamicTable_id_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: int + required: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows. This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. + This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. + range: text + DynamicTable: + name: DynamicTable + description: A DynamicTable representing a particular category for columns + in the AlignedDynamicTable parent container. The table MUST be aligned with + (i.e., have the same number of rows) as all other DynamicTables stored in + the AlignedDynamicTable parent container. The name of the category is given + by the name of the DynamicTable and its description by the description attribute + of the DynamicTable. + multivalued: true + range: DynamicTable + required: false diff --git a/nwb_linkml/schema/hdmf-common.yaml b/nwb_linkml/schema/hdmf-common.yaml new file mode 100644 index 0000000..c43cbc1 --- /dev/null +++ b/nwb_linkml/schema/hdmf-common.yaml @@ -0,0 +1,9 @@ +name: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.8.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +default_prefix: hdmf-common/ diff --git a/nwb_linkml/schema/hdmf-experimental.experimental.yaml b/nwb_linkml/schema/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..d9052ca --- /dev/null +++ b/nwb_linkml/schema/hdmf-experimental.experimental.yaml @@ -0,0 +1,18 @@ +name: hdmf-experimental.experimental +id: hdmf-experimental.experimental +imports: +- hdmf-common.table +- nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. 
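Per the EnumData description above, decoding is a single indexing step into the referenced 'elements' VectorData (illustrative values):

```python
# EnumData sketch: each stored integer i refers to the i-th value of the
# 'elements' VectorData.
elements = ["left", "right", "center"]   # the referenced VectorData
data = [0, 2, 1, 1, 0]                   # stored EnumData values

decoded = [elements[i] for i in data]
print(decoded)  # ['left', 'center', 'right', 'right', 'left']
```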
+ is_a: VectorData + attributes: + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData diff --git a/nwb_linkml/schema/hdmf-experimental.resources.yaml b/nwb_linkml/schema/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..b57caae --- /dev/null +++ b/nwb_linkml/schema/hdmf-experimental.resources.yaml @@ -0,0 +1,149 @@ +name: hdmf-experimental.resources +id: hdmf-experimental.resources +imports: +- hdmf-common.base +- nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + HERD: + name: HERD + description: HDMF External Resources Data Structure. A set of six tables for tracking + external resource references in a file or across multiple files. + is_a: Container + attributes: + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + multivalued: false + range: HERD_keys + required: true + files: + name: files + description: A table for storing object ids of files used in external resources. + multivalued: false + range: HERD_files + required: true + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + multivalued: false + range: HERD_entities + required: true + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + multivalued: false + range: HERD_objects + required: true + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + multivalued: false + range: HERD_object_keys + required: true + entity_keys: + name: entity_keys + description: A table for identifying which keys use which entity. + multivalued: false + range: HERD_entity_keys + required: true + HERD_keys: + name: HERD_keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + array: + name: array + range: HERD_keys_Array + HERD_keys_Array: + name: HERD_keys_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true + HERD_files: + name: HERD_files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + array: + name: array + range: HERD_files_Array + HERD_files_Array: + name: HERD_files_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true + HERD_entities: + name: HERD_entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + array: + name: array + range: HERD_entities_Array + HERD_entities_Array: + name: HERD_entities_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true + HERD_objects: + name: HERD_objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + array: + name: array + range: HERD_objects_Array + HERD_objects_Array: + name: HERD_objects_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true + HERD_object_keys: + name: HERD_object_keys + description: A table for identifying which objects use which keys. 
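The HERD description lists six linked tables. The sketch below shows only the relational idea (an object's key resolved to an external entity); the table and column names here are hypothetical and do not reflect the actual HERD column layout:

```python
# Relational sketch of the HERD idea (NOT the real HERD column layout; all
# column names are hypothetical). A key used by an object is looked up via
# object_keys, then resolved to an external resource entity.
keys = ["Homo sapiens"]                               # user terms
entities = [{"key_idx": 0, "uri": "NCBI:txid9606"}]   # key -> external entity
object_keys = [{"object_idx": 0, "key_idx": 0}]       # which objects use which keys
objects = ["/general/subject/species"]                # objects holding references

obj = 0
for link in object_keys:
    if link["object_idx"] == obj:
        key = keys[link["key_idx"]]
        uris = [e["uri"] for e in entities if e["key_idx"] == link["key_idx"]]
        print(objects[obj], "->", key, "->", uris)
# /general/subject/species -> Homo sapiens -> ['NCBI:txid9606']
```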
+ is_a: Data + attributes: + array: + name: array + range: HERD_object_keys_Array + HERD_object_keys_Array: + name: HERD_object_keys_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true + HERD_entity_keys: + name: HERD_entity_keys + description: A table for identifying which keys use which entity. + is_a: Data + attributes: + array: + name: array + range: HERD_entity_keys_Array + HERD_entity_keys_Array: + name: HERD_entity_keys_Array + is_a: Arraylike + attributes: + num_rows: + name: num_rows + range: AnyType + required: true diff --git a/nwb_linkml/schema/hdmf-experimental.yaml b/nwb_linkml/schema/hdmf-experimental.yaml new file mode 100644 index 0000000..2eadb29 --- /dev/null +++ b/nwb_linkml/schema/hdmf-experimental.yaml @@ -0,0 +1,9 @@ +name: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. +id: hdmf-experimental +version: 0.5.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/schema/nwb.language.yaml b/nwb_linkml/schema/nwb.language.yaml new file mode 100644 index 0000000..53c02e6 --- /dev/null +++ b/nwb_linkml/schema/nwb.language.yaml @@ -0,0 +1,154 @@ +name: nwb.language +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float: + name: float + typeof: float + float32: + name: float32 + typeof: float + double: + name: double + typeof: double + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: date +enums: + FlatDType: + name: FlatDType + permissible_values: + float: + text: float + float32: + text: float32 + double: + text: double + float64: + text: float64 + long: + text: long + int64: + text: int64 + int: + text: int + int32: + text: int32 + int16: + text: int16 + short: + text: short + int8: + text: int8 + uint: + text: uint + uint32: + text: uint32 + uint16: + text: uint16 + uint8: + text: uint8 + uint64: + text: uint64 + numeric: + text: numeric + text: + text: text + utf: + text: utf + utf8: + text: utf8 + utf_8: + text: utf_8 + ascii: + text: ascii + bool: + text: bool + isodatetime: + text: isodatetime +classes: + Arraylike: + name: Arraylike + description: Container for arraylike information held in the dims, shape, and + dtype properties.this is a special case to be interpreted by downstream i/o. 
+ this class has no slotsand is abstract by default.- Each slot within a subclass + indicates a possible dimension.- Only dimensions that are present in all the + dimension specifiers in the original schema are required.- Shape requirements + are indicated using max/min cardinalities on the slot. + abstract: true + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_schema_language/src/data/tests/nwb.base.yaml b/nwb_schema_language/src/data/tests/nwb.base.yaml index 52fb3b5..859e904 100644 --- a/nwb_schema_language/src/data/tests/nwb.base.yaml +++ b/nwb_schema_language/src/data/tests/nwb.base.yaml @@ -253,7 +253,7 @@ groups: dtype: text doc: Description of this collection of images. datasets: -# - neurodata_type_inc: Image + - neurodata_type_inc: Image doc: Images stored in this collection. quantity: '+' - name: order_of_images diff --git a/poetry.lock b/poetry.lock index b355a4e..5f37595 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,20 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +[[package]] +name = "ansi2html" +version = "1.8.0" +description = "" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ansi2html-1.8.0-py3-none-any.whl", hash = "sha256:ef9cc9682539dbe524fbf8edad9c9462a308e04bce1170c32daa8fdfd0001785"}, + {file = "ansi2html-1.8.0.tar.gz", hash = "sha256:38b82a298482a1fa2613f0f9c9beb3db72a8f832eeac58eb2e47bf32cd37f6d5"}, +] + +[package.extras] +docs = ["Sphinx", "setuptools-scm", "sphinx-rtd-theme"] +test = ["pytest", "pytest-cov"] + [[package]] name = "antlr4-python3-runtime" version = "4.9.3" @@ -210,6 +225,86 @@ pandas = ["pandas"] rdflib = ["rdflib"] tests = ["coverage", "pytest"] +[[package]] +name = "dash" +version = "2.12.1" +description = "A Python framework for building reactive web-apps. Developed by Plotly." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "dash-2.12.1-py3-none-any.whl", hash = "sha256:23fcde95e59e353c34712c8fa3e90e784a7247a9e5f6ef47e467add10b7e91ab"}, + {file = "dash-2.12.1.tar.gz", hash = "sha256:c7d3dccafff2d041a371dcf5bbb2a1701a38ca178c12dce93e64207e3aecbaeb"}, +] + +[package.dependencies] +ansi2html = "*" +dash-core-components = "2.0.0" +dash-html-components = "2.0.0" +dash-table = "5.0.0" +Flask = ">=1.0.4,<2.3.0" +nest-asyncio = "*" +plotly = ">=5.0.0" +requests = "*" +retrying = "*" +setuptools = "*" +typing-extensions = ">=4.1.1" +Werkzeug = "<2.3.0" + +[package.extras] +celery = ["celery[redis] (>=5.1.2)", "importlib-metadata (<5)", "redis (>=3.5.3)"] +ci = ["black (==21.6b0)", "black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==3.9.2)", "flaky (==3.7.0)", "flask-talisman (==1.0.0)", "isort (==4.3.21)", "jupyterlab (<4.0.0)", "mimesis", "mock (==4.0.3)", "numpy", "openpyxl", "orjson (==3.5.4)", "orjson (==3.6.7)", "pandas (==1.1.5)", "pandas (>=1.4.0)", "preconditions", "pyarrow", "pyarrow (<3)", "pylint (==2.13.5)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "xlrd (<2)", "xlrd (>=2.0.1)"] +compress = ["flask-compress"] +dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"] +diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"] +testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"] + +[[package]] +name = "dash-core-components" +version = "2.0.0" +description = "Core component suite for Dash" +optional = false +python-versions = "*" +files = [ + {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"}, + {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"}, +] + +[[package]] +name = "dash-cytoscape" +version = "0.3.0" +description = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js" +optional = false +python-versions = "*" +files = [ + {file = "dash_cytoscape-0.3.0-py3-none-any.whl", hash = "sha256:718dc1568b9e7bfe7f64376aa903c64a1a1fe6daed4e559b254456f18dd3135f"}, + {file = "dash_cytoscape-0.3.0.tar.gz", hash = "sha256:a71ad4fe095570b71d4ad7c0d29199e9780c2e6796173d3b25fccc2cc58c855f"}, +] + +[package.dependencies] +dash = "*" + +[[package]] +name = "dash-html-components" +version = "2.0.0" +description = "Vanilla HTML components for Dash" +optional = false +python-versions = "*" +files = [ + {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"}, + {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"}, +] + +[[package]] +name = "dash-table" +version = "5.0.0" +description = "Dash table" +optional = false +python-versions = "*" +files = [ + {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"}, + {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"}, +] + [[package]] name = "decorator" version = "5.1.1" @@ -249,6 
+344,27 @@ files = [ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] +[[package]] +name = "flask" +version = "2.2.5" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, + {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"}, +] + +[package.dependencies] +click = ">=8.0" +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.2.2" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + [[package]] name = "fqdn" version = "1.5.1" @@ -443,6 +559,17 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + [[package]] name = "jinja2" version = "3.1.2" @@ -760,6 +887,17 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "nest-asyncio" +version = "1.5.7" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, + {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, +] + [[package]] name = "networkx" version = "3.1" @@ -834,6 +972,21 @@ files = [ {file = "parse-1.19.1.tar.gz", hash = "sha256:cc3a47236ff05da377617ddefa867b7ba983819c664e1afe46249e5b469be464"}, ] +[[package]] +name = "plotly" +version = "5.16.1" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "plotly-5.16.1-py2.py3-none-any.whl", hash = "sha256:19cc34f339acd4e624177806c14df22f388f23fb70658b03aad959a0e650a0dc"}, + {file = "plotly-5.16.1.tar.gz", hash = "sha256:295ac25edeb18c893abb71dcadcea075b78fd6fdf07cee4217a4e1009667925b"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + [[package]] name = "pluggy" version = "1.2.0" @@ -1247,6 +1400,20 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "retrying" +version = "1.3.4" +description = "Retrying" +optional = false +python-versions = "*" +files = [ + {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"}, + {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"}, +] + +[package.dependencies] +six = ">=1.7.0" + [[package]] name = "rfc3339-validator" version = "0.1.4" @@ -1460,6 +1627,22 @@ files = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] +[[package]] +name = "setuptools" +version = "68.1.2" +description = "Easily download, build, install, upgrade, 
and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "shexjsg" version = "0.8.2" @@ -1610,6 +1793,20 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -1706,6 +1903,23 @@ files = [ docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] tests = ["pytest", "pytest-cov"] +[[package]] +name = "werkzeug" +version = "2.2.3" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + [[package]] name = "wrapt" version = "1.15.0" @@ -1808,4 +2022,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "debbeeaba69d6afc3da329ccc76e0c2ae3124773b85a577a10cb5e673845a9e5" +content-hash = "8b70e71931e2f519212b40f0dfcb9cc597e9f4894dbf392a0232df9060a35ee1" diff --git a/pyproject.toml b/pyproject.toml index 8a304da..24ae878 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,11 +19,20 @@ pydantic = "<2" rich = "^13.5.2" linkml = "^1.5.7" +[tool.poetry.group.tests] +optional = true -[tool.poetry.group.dev.dependencies] +[tool.poetry.group.tests.dependencies] pytest = "^7.4.0" pytest-depends = "^1.0.1" +[tool.poetry.group.plot] +optional = true + +[tool.poetry.group.plot.dependencies] +dash = "^2.12.1" +dash-cytoscape = "^0.3.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/scripts/generate_core.py b/scripts/generate_core.py new file mode 100644 index 0000000..161cba8 --- /dev/null +++ b/scripts/generate_core.py @@ -0,0 +1,62 @@ +from argparse import ArgumentParser +from pathlib import Path +from linkml_runtime.dumpers import yaml_dumper +from linkml.generators import PydanticGenerator + +from nwb_linkml import io + +def generate_core_yaml(output_path:Path): + core = io.load_nwb_core() + built_schemas = core.build().schemas + for schema in built_schemas: + output_file = output_path / (schema.name + '.yaml') + yaml_dumper.dump(schema, output_file) + +def generate_core_pydantic(yaml_path:Path, output_path:Path): + for schema in yaml_path.glob('*.yaml'): + pydantic_file = (output_path / schema.name).with_suffix('.py') + + generator = PydanticGenerator( + str(schema), + pydantic_version='1', + emit_metadata=True, + gen_classvars=True, + gen_slots=True + ) + gen_pydantic = generator.serialize() + with open(pydantic_file, 'w') as pfile: + pfile.write(gen_pydantic) + +def parser() -> ArgumentParser: + parser = ArgumentParser('Generate NWB core schema') + parser.add_argument( + '--yaml', + help="directory to export linkML schema to", + type=Path, + default=Path(__file__).parent.parent / 'nwb_linkml' / 'schema' + ) + parser.add_argument( + '--pydantic', + help="directory to export pydantic models", + type=Path, + default=Path(__file__).parent.parent / 'nwb_linkml' / 'models' + ) + return parser + + +def main(): + args = parser().parse_args() + args.yaml.mkdir(exist_ok=True) + args.pydantic.mkdir(exist_ok=True) + generate_core_yaml(args.yaml) + generate_core_pydantic(args.yaml, args.pydantic) + +if __name__ == "__main__": + main() + + + + + + + diff --git a/tests/test_generate.py b/tests/test_generate.py index f035674..3924fd5 100644 --- a/tests/test_generate.py +++ b/tests/test_generate.py @@ -21,17 +21,21 @@ def test_generate_core(nwb_core_fixture, tmp_output_dir): @pytest.mark.depends(on=['test_generate_core']) def test_generate_pydantic(tmp_output_dir): - core_file = tmp_output_dir / 'core.yaml' - pydantic_file = tmp_output_dir / 'core.py' - generator = PydanticGenerator( - str(core_file), - pydantic_version='1', - emit_metadata=True, 
- gen_classvars=True, - gen_slots=True - ) - gen_pydantic = generator.serialize() - with open(pydantic_file, 'w') as pfile: - pfile.write(gen_pydantic) + # core_file = tmp_output_dir / 'core.yaml' + # pydantic_file = tmp_output_dir / 'core.py' + + for schema in tmp_output_dir.glob('*.yaml'): + pydantic_file = (schema.parent / schema.name).with_suffix('.py') + + generator = PydanticGenerator( + str(schema), + pydantic_version='1', + emit_metadata=True, + gen_classvars=True, + gen_slots=True + ) + gen_pydantic = generator.serialize() + with open(pydantic_file, 'w') as pfile: + pfile.write(gen_pydantic)
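
A note on the Arraylike pattern in the nwb.language.yaml hunk above: each slot on an Arraylike subclass marks one possible dimension, a dimension is required only when it appears in every dims specifier of the source dataset, and fixed shape entries become matching minimum/maximum cardinalities on the slot. A minimal sketch, assuming a hypothetical dataset (the class name, dimension labels, and dtype below are illustrative, not taken from this diff) declared with dims [[x, y], [x, y, z]] and shape [[null, null], [null, null, 3]]:

```yaml
# Hypothetical generated class, sketched from the Arraylike description above;
# not output produced by the code in this changeset.
ExampleSeries_Array:
  name: ExampleSeries_Array
  is_a: Arraylike
  attributes:
    x:
      name: x
      required: true        # appears in both dims specifiers
      range: float32        # illustrative dtype
    y:
      name: y
      required: true
      range: float32
    z:
      name: z
      required: false       # only appears in the second dims specifier
      minimum_cardinality: 3   # shape entry of 3 pins the length
      maximum_cardinality: 3
      range: float32
```

Dimensions that are not shared by every specifier stay optional, so a single generated class can cover all of the shape variants listed in the source schema.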
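On the new scripts/generate_core.py: run with no arguments it should write the LinkML YAML schemas to nwb_linkml/schema and the generated pydantic models to nwb_linkml/models (its argparse defaults), and the --yaml and --pydantic flags point it at other directories. The tests and plot dependency groups added to pyproject.toml are marked optional, so a plain `poetry install` skips them; they have to be requested explicitly, e.g. `poetry install --with tests,plot` (assuming a Poetry version that supports the --with flag, such as the 1.5.1 that generated the lock file).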