I believe that's a full translation

Or at least, all the semantics are present. It's not pretty by any stretch of the imagination.
This commit is contained in:
sneakers-the-rat 2023-08-25 00:22:47 -07:00
parent a4806543ef
commit 4faaa8efe8
50 changed files with 16674 additions and 52 deletions

View file

@@ -1,2 +1,4 @@
# translate-nwb
# nwb-linkml
Translating NWB schema language to linkml
(very WIP, don't @ me)

View file

@@ -0,0 +1,4 @@
from nwb_linkml.adapters.adapter import Adapter
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.adapters.classes import ClassAdapter

View file

@@ -4,7 +4,7 @@ Base class for adapters
from abc import abstractmethod
import warnings
from dataclasses import dataclass, field
from typing import List, Dict, Type, Generator, Any, Tuple, Optional
from typing import List, Dict, Type, Generator, Any, Tuple, Optional, TypeVar, TypeVarTuple, Unpack
from pydantic import BaseModel, Field, validator
from linkml_runtime.linkml_model import Element, SchemaDefinition, ClassDefinition, SlotDefinition, TypeDefinition
@@ -46,6 +46,8 @@ class BuildResult:
self.types.extend(other.types)
return self
T = TypeVar('T')
Ts = TypeVarTuple('Ts')
class Adapter(BaseModel):
@abstractmethod
@@ -84,7 +86,7 @@ class Adapter(BaseModel):
yield item[1]
def walk_types(self, input: BaseModel | list | dict, get_type: Type | List[Type] | Tuple[Type]):
def walk_types(self, input: BaseModel | list | dict, get_type: T | List[Unpack[Ts]] | Tuple[Unpack[Ts]]) -> Generator[T, None, None]:
if not isinstance(get_type, (list, tuple)):
get_type = [get_type]
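A minimal standalone sketch of the TypeVar/TypeVarTuple pattern the new walk_types signature relies on (the names here are hypothetical and this is not the actual walk_types body; it assumes Python 3.11+, or typing_extensions for TypeVarTuple/Unpack):

from typing import Generator, Tuple, Type, TypeVar, TypeVarTuple, Unpack

T = TypeVar('T')
Ts = TypeVarTuple('Ts')

def walk(items: list, get_type: Type[T] | Tuple[Unpack[Ts]]) -> Generator[T, None, None]:
    """Yield every item that is an instance of the requested type(s)."""
    types = get_type if isinstance(get_type, tuple) else (get_type,)
    for item in items:
        if isinstance(item, types):
            yield item

print(list(walk([1, "a", 2.5], (int, float))))  # -> [1, 2.5]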

View file

@@ -0,0 +1,2 @@

View file

@@ -3,10 +3,11 @@ Adapters to linkML classes
"""
import pdb
from typing import List, Optional
from nwb_schema_language import Dataset, Group, ReferenceDtype, DTypeType
from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.lang_elements import Arraylike
class ClassAdapter(Adapter):
"""
@@ -61,10 +62,122 @@ class ClassAdapter(Adapter):
return name
def handle_dtype(self, dtype: DTypeType):
def handle_arraylike(self, dataset: Dataset, name:Optional[str]=None) -> Optional[ClassDefinition]:
"""
Handling the
- dims
- shape
- dtype
fields as they are used in datasets. We'll use the :class:`.Arraylike` class to imitate them.
Specifically:
- Each slot within a subclass indicates a possible dimension.
- Only dimensions that are present in all the dimension specifiers in the
original schema are required.
- Shape requirements are indicated using max/min cardinalities on the slot.
- The arraylike object should be stored in the `array` slot on the containing class
(since there are already properties named `data`)
If both `dims` and `shape` are undefined, return `None`; if only one of them is defined, raise a ValueError.
Args:
dataset (:class:`nwb_schema_language.Dataset`): The dataset defining the arraylike
name (str): If present, override the name of the class before appending _Array.
(We don't use _get_full_name here because we eventually want to decouple these functions from this adapter
class, which is something of a development crutch. Ideally all these methods would just work on base nwb-schema-language types.)
"""
if not any((dataset.dims, dataset.shape)):
# none of the required properties are defined, that's fine.
return
elif not all((dataset.dims, dataset.shape)):
# need to have both if one is present!
raise ValueError(f"A dataset needs both dims and shape to define an arraylike object")
# The schema language doesn't have a way of specifying a dataset/group is "abstract"
# and yet hdmf-common says you don't need a dtype if the dataset is "abstract"
# so....
dtype = self.handle_dtype(dataset.dtype)
# dims and shape are lists of lists. First we couple them
# (so each dim has its corresponding shape)..
# and then we take unique
# (dicts are ordered by default in recent pythons,
# while set() doesn't preserve order)
dims_shape = []
for inner_dim, inner_shape in zip(dataset.dims, dataset.shape):
if isinstance(inner_dim, list):
# list of lists
dims_shape.extend([(dim, shape) for dim, shape in zip(inner_dim, inner_shape)])
else:
# single-layer list
dims_shape.append((inner_dim, inner_shape))
dims_shape = tuple(dict.fromkeys(dims_shape).keys())
# now make slots for each of them
slots = []
for dims, shape in dims_shape:
# if a dim is present in every dimension specifier, make it required
# (a flat dims list is a single specifier, so all of its dims are required)
if isinstance(dataset.dims[0], list):
required = all([dims in inner_dim for inner_dim in dataset.dims])
else:
required = True
# use cardinality to do shape
if shape == 'null':
cardinality = None
else:
cardinality = shape
slots.append(SlotDefinition(
name=dims,
required=required,
maximum_cardinality=cardinality,
minimum_cardinality=cardinality,
range=dtype
))
# and then the class is just a subclass of `Arraylike` (which is imported by default from `nwb.language.yaml`)
if name:
pass
elif dataset.neurodata_type_def:
name = dataset.neurodata_type_def
elif dataset.name:
name = dataset.name
else:
raise ValueError(f"Dataset has no name or type definition, what do call it?")
name = '_'.join([name, 'Array'])
array_class = ClassDefinition(
name=name,
is_a="Arraylike",
attributes=slots
)
return array_class
def handle_dtype(self, dtype: DTypeType | None) -> str:
if isinstance(dtype, ReferenceDtype):
return dtype.target_type
elif dtype is None or dtype == []:
# Some ill-defined datasets are "abstract" despite that not being in the schema language
return 'AnyType'
elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype):
# there is precisely one class that uses compound dtypes:
# TimeSeriesReferenceVectorData
# compoundDtypes are able to define a ragged table according to the schema
# but are used in this single case equivalently to attributes.
# so we'll... uh... treat them as slots.
# TODO
return 'AnyType'
#raise NotImplementedError('got distracted, need to implement')
else:
# flat dtype
return dtype
def build_attrs(self, cls: Dataset | Group) -> List[SlotDefinition]:
@@ -72,7 +185,7 @@ class ClassAdapter(Adapter):
SlotDefinition(
name=attr.name,
description=attr.doc,
range=self.handle_dtype(attr.dtype)
range=self.handle_dtype(attr.dtype),
) for attr in cls.attributes
]
@@ -93,7 +206,8 @@ class ClassAdapter(Adapter):
this_slot = SlotDefinition(
name=subclass._get_name(),
description=subclass.cls.doc,
range=subclass._get_full_name()
range=subclass._get_full_name(),
**QUANTITY_MAP[subclass.cls.quantity]
)
nested_res.slots.append(this_slot)
@@ -113,8 +227,6 @@ class ClassAdapter(Adapter):
name = self._get_full_name()
else:
name = self._get_name()
# if name == 'TimeSeries':
# pdb.set_trace()
# Get vanilla top-level attributes
attrs = self.build_attrs(self.cls)
@@ -122,16 +234,33 @@ class ClassAdapter(Adapter):
# unnest and build subclasses in datasets and groups
if isinstance(self.cls, Group):
# only groups have sub-datasets and sub-groups
# split out the recursion step rather than making this purely recursive, because
# top-level datasets and groups are handled differently: they have names,
# so we need to distinguish the things we unnest from the things that
# can just be slots because they are already defined without knowing about
# the global state of the schema build.
nested_res = self.build_subclasses(self.cls)
attrs.extend(nested_res.slots)
else:
# must be a dataset
nested_res = BuildResult()
arraylike = self.handle_arraylike(self.cls, self._get_full_name())
if arraylike:
# make a slot for the arraylike class
attrs.append(
SlotDefinition(
name='array',
range=arraylike.name
)
)
nested_res.classes.append(arraylike)
cls = ClassDefinition(
name = name,
is_a = self.cls.neurodata_type_inc,
description=self.cls.doc,
attributes=attrs
attributes=attrs,
)
res = BuildResult(
classes = [cls, *nested_res.classes]
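For illustration, a standalone sketch (editorial, not part of the commit) of the dims/shape handling that handle_arraylike above performs: coupling each dim to its shape, de-duplicating while preserving order, and requiring only the dims that appear in every specifier. The dimension names and shapes are hypothetical:

# two alternative dimension specifiers for one hypothetical dataset
dims = [["num_times"], ["num_times", "num_features"]]
shape = [[None], [None, None]]

# couple each dim with its shape, then de-duplicate while preserving order
dims_shape = []
for inner_dim, inner_shape in zip(dims, shape):
    if isinstance(inner_dim, list):
        dims_shape.extend(zip(inner_dim, inner_shape))
    else:
        dims_shape.append((inner_dim, inner_shape))
dims_shape = tuple(dict.fromkeys(dims_shape))

# a dim is required only if it appears in every specifier;
# a non-None shape would become a max/min cardinality on the slot
required = {dim: all(dim in spec for spec in dims) for dim, _ in dims_shape}

print(dims_shape)  # (('num_times', None), ('num_features', None))
print(required)    # {'num_times': True, 'num_features': False}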

View file

@@ -20,27 +20,27 @@ FlatDType = EnumDefinition(
permissible_values=[PermissibleValue(p) for p in FlatDtype_source.__members__.keys()],
)
DimNameSlot = SlotDefinition(
name="dim_name",
range="string",
description="The name of a dimension"
)
DimShapeSlot = SlotDefinition(
name="dim_shape",
range="integer",
required=False
)
DimClass = ClassDefinition(
name="Dimension",
slots=[DimNameSlot.name, DimShapeSlot.name],
description="A single dimension within a shape"
)
DimSlot = SlotDefinition(
name="dim",
range=DimClass.name,
multivalued=True,
description="Slot representing the dimensions that a Shape can have"
)
# DimNameSlot = SlotDefinition(
# name="dim_name",
# range="string",
# description="The name of a dimension"
# )
# DimShapeSlot = SlotDefinition(
# name="dim_shape",
# range="integer",
# required=False
# )
# DimClass = ClassDefinition(
# name="Dimension",
# slots=[DimNameSlot.name, DimShapeSlot.name],
# description="A single dimension within a shape"
# )
# DimSlot = SlotDefinition(
# name="dim",
# range=DimClass.name,
# multivalued=True,
# description="Slot representing the dimensions that a Shape can have"
# )
# ShapeClass = ClassDefinition(
# name="Shape",
@@ -61,14 +61,32 @@ for nwbtype, linkmltype in flat_to_linkml.items():
)
DTypeTypes.append(atype)
Arraylike = ClassDefinition(
name="Arraylike",
description= ("Container for arraylike information held in the dims, shape, and dtype properties."
"this is a special case to be interpreted by downstream i/o. this class has no slots"
"and is abstract by default."
"- Each slot within a subclass indicates a possible dimension."
"- Only dimensions that are present in all the dimension specifiers in the"
" original schema are required."
"- Shape requirements are indicated using max/min cardinalities on the slot."
),
abstract=True
)
AnyType = ClassDefinition(
name="AnyType",
class_uri="linkml:Any",
description="""Needed because some classes in hdmf-common are datasets without dtype"""
)
NwbLangSchema = SchemaDefinition(
name="nwb.language",
id='nwb.language',
description="Adapter objects to mimic the behavior of elements in the nwb-schema-language",
enums=[FlatDType],
slots=[DimNameSlot, DimShapeSlot, DimSlot],
classes=[DimClass],
# slots=[DimNameSlot, DimShapeSlot, DimSlot],
classes=[Arraylike, AnyType],
types=DTypeTypes,
imports=['linkml:types'],
prefixes={'linkml': Prefix('linkml','https://w3id.org/linkml')}
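To make the Arraylike convention concrete, here is a hedged sketch of what one generated subclass might look like and how it can be inspected as YAML, assuming linkml-runtime's yaml_dumper; the class and slot names are hypothetical rather than taken from the commit:

from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition

# hypothetical product of handle_arraylike for a dataset whose dims were
# ["num_times", "num_features"] with unconstrained shape
example_array = ClassDefinition(
    name="MyDataset_Array",
    is_a="Arraylike",
    attributes=[
        SlotDefinition(name="num_times", required=True, range="float"),
        SlotDefinition(name="num_features", required=False, range="float"),
    ],
)
print(yaml_dumper.dumps(example_array))  # YAML view of the class and its slots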

View file

@@ -1,2 +1,3 @@
# Import everything so it's defined, but it shouldn't necessarily be used from here
from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP

View file

@@ -0,0 +1,34 @@
"""
Quantity maps onto two things: required and cardinality.
Though it is technically possible to use an integer as
a quantity, that is never done in the core schema,
which is our only target for now.
We will handle the cardinality of array dimensions elsewhere.
"""
QUANTITY_MAP = {
'*': {
'required': False,
'multivalued': True
},
'+': {
'required': True,
'multivalued': True
},
'?': {
'required': False,
'multivalued': False
},
1: {
'required': True,
'multivalued': False
},
# include the NoneType for indexing
None: {
'required': None,
'multivalued': None
}
}
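A small usage sketch of how QUANTITY_MAP is meant to be consumed, mirroring the **QUANTITY_MAP[...] splat added to ClassAdapter above (assuming the nwb_linkml.maps.quantity import path shown in maps/__init__.py):

from linkml_runtime.linkml_model import SlotDefinition
from nwb_linkml.maps.quantity import QUANTITY_MAP

# an NWB quantity of '+' (one or more) becomes a required, multivalued slot
slot = SlotDefinition(name="electrodes", **QUANTITY_MAP['+'])
assert slot.required is True
assert slot.multivalued is True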

View file

@@ -0,0 +1,929 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class SpatialSeriesData(ConfiguredBaseModel):
"""
1-D or 2-D array storing position or direction relative to some reference frame.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
array: Optional[SpatialSeriesDataArray] = Field(None)
class SpatialSeriesReferenceFrame(ConfiguredBaseModel):
"""
Description defining what exactly 'straight-ahead' means.
"""
None
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class SpatialSeriesDataArray(Arraylike):
num_times: float = Field(...)
x: Optional[float] = Field(None)
xy: Optional[float] = Field(None)
xyz: Optional[float] = Field(None)
class AbstractFeatureSeriesData(ConfiguredBaseModel):
"""
Values of each feature at each time.
"""
unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""")
array: Optional[AbstractFeatureSeriesDataArray] = Field(None)
class AbstractFeatureSeriesDataArray(Arraylike):
num_times: float = Field(...)
num_features: Optional[float] = Field(None)
class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel):
"""
Units of each feature.
"""
array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None)
class AbstractFeatureSeriesFeatureUnitsArray(Arraylike):
num_features: str = Field(...)
class AbstractFeatureSeriesFeatures(ConfiguredBaseModel):
"""
Description of the features represented in TimeSeries::data.
"""
array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None)
class AbstractFeatureSeriesFeaturesArray(Arraylike):
num_features: str = Field(...)
class AnnotationSeriesData(ConfiguredBaseModel):
"""
Annotations made during an experiment.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[AnnotationSeriesDataArray] = Field(None)
class AnnotationSeriesDataArray(Arraylike):
num_times: str = Field(...)
class IntervalSeriesData(ConfiguredBaseModel):
"""
Use values >0 if interval started, <0 if interval ended.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[IntervalSeriesDataArray] = Field(None)
class IntervalSeriesDataArray(Arraylike):
num_times: int = Field(...)
class DecompositionSeriesData(ConfiguredBaseModel):
"""
Data decomposed into frequency bands.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""")
array: Optional[DecompositionSeriesDataArray] = Field(None)
class DecompositionSeriesDataArray(Arraylike):
num_times: Optional[float] = Field(None)
num_channels: Optional[float] = Field(None)
num_bands: Optional[float] = Field(None)
class DecompositionSeriesMetric(ConfiguredBaseModel):
"""
The metric used, e.g. phase, amplitude, power.
"""
None
class DecompositionSeriesBandsBandLimitsArray(Arraylike):
num_bands: Optional[float] = Field(None)
low_high: Optional[float] = Field(None)
class DecompositionSeriesBandsBandMeanArray(Arraylike):
num_bands: float = Field(...)
class DecompositionSeriesBandsBandStdevArray(Arraylike):
num_bands: float = Field(...)
class UnitsObsIntervalsArray(Arraylike):
num_intervals: Optional[float] = Field(None)
start_end: Optional[float] = Field(None)  # named 'start|end' in the NWB schema; '|' is not valid in a Python identifier
class UnitsWaveformMeanArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformSdArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformsArray(Arraylike):
num_waveforms: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandName(VectorData):
"""
Name of the band, e.g. theta.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandLimits(VectorData):
"""
Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.
"""
array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandMean(VectorData):
"""
The mean Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandStdev(VectorData):
"""
The standard deviation of Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimes(VectorData):
"""
Spike times for each unit in seconds.
"""
resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsObsIntervals(VectorData):
"""
Observation intervals for each unit.
"""
array: Optional[UnitsObsIntervalsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodeGroup(VectorData):
"""
Electrode group that each spike unit came from.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsWaveformMean(VectorData):
"""
Spike waveform mean for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformSd(VectorData):
"""
Spike waveform standard deviation for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformSdArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveforms(VectorData):
"""
Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimesIndex(VectorIndex):
"""
Index into the spike_times dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsObsIntervalsIndex(VectorIndex):
"""
Index into the obs_intervals dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodesIndex(VectorIndex):
"""
Index into electrodes.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndex(VectorIndex):
"""
Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndexIndex(VectorIndex):
"""
Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DecompositionSeriesSourceChannels(DynamicTableRegion):
"""
DynamicTableRegion pointer to the channels that this decomposition series was generated from.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class UnitsElectrodes(DynamicTableRegion):
"""
Electrode that each spike unit came from, specified using a DynamicTableRegion.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DecompositionSeriesBands(DynamicTable):
"""
Table for describing the bands that this series was generated from. There should be one row in this table for each band.
"""
band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""")
band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""")
band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""")
band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class Units(DynamicTable):
"""
Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
"""
spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""")
waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class BehavioralEpochs(NWBDataInterface):
"""
TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data.
"""
IntervalSeries: Optional[List[IntervalSeries]] = Field(default_factory=list, description="""IntervalSeries object containing start and stop times of epochs.""")
class BehavioralEvents(NWBDataInterface):
"""
TimeSeries for storing behavioral events. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
"""
TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing behavioral events.""")
class BehavioralTimeSeries(NWBDataInterface):
"""
TimeSeries for storing behavioral time series data. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
"""
TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing continuous behavioral data.""")
class PupilTracking(NWBDataInterface):
"""
Eye-tracking data, representing pupil size.
"""
TimeSeries: List[TimeSeries] = Field(default_factory=list, description="""TimeSeries object containing time series data on pupil size.""")
class EyeTracking(NWBDataInterface):
"""
Eye-tracking data, representing direction of gaze.
"""
SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing data measuring direction of gaze.""")
class CompassDirection(NWBDataInterface):
"""
With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees.
"""
SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing direction of gaze travel.""")
class Position(NWBDataInterface):
"""
Position data, whether along the x, x/y or x/y/z axis.
"""
SpatialSeries: List[SpatialSeries] = Field(default_factory=list, description="""SpatialSeries object containing position data.""")
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class SpatialSeries(TimeSeries):
"""
Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.
"""
data: SpatialSeriesData = Field(..., description="""1-D or 2-D array storing position or direction relative to some reference frame.""")
reference_frame: Optional[SpatialSeriesReferenceFrame] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AbstractFeatureSeries(TimeSeries):
"""
Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
"""
data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""")
feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""")
features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AnnotationSeries(TimeSeries):
"""
Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
"""
data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
"""
data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
"""
data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""")
metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""")
bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the unit specified by 'unit', e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
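# A minimal sketch (hypothetical raw values, not part of the generated models)
# of the conversion arithmetic described in the TimeSeriesData docstring: the
# example 'conversion' for an int16 acquisition system with a +/-2.5 V range
# and 8000x gain, applied as data * conversion + offset.
example_conversion = 2.5 / 32768 / 8000                # ~9.5367e-9 V per raw unit
example_offset = 0.0
example_raw = [-32768, 0, 32767]                       # raw int16 samples
example_volts = [r * example_conversion + example_offset for r in example_raw]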
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
SpatialSeriesData.update_forward_refs()
SpatialSeriesReferenceFrame.update_forward_refs()
Arraylike.update_forward_refs()
SpatialSeriesDataArray.update_forward_refs()
AbstractFeatureSeriesData.update_forward_refs()
AbstractFeatureSeriesDataArray.update_forward_refs()
AbstractFeatureSeriesFeatureUnits.update_forward_refs()
AbstractFeatureSeriesFeatureUnitsArray.update_forward_refs()
AbstractFeatureSeriesFeatures.update_forward_refs()
AbstractFeatureSeriesFeaturesArray.update_forward_refs()
AnnotationSeriesData.update_forward_refs()
AnnotationSeriesDataArray.update_forward_refs()
IntervalSeriesData.update_forward_refs()
IntervalSeriesDataArray.update_forward_refs()
DecompositionSeriesData.update_forward_refs()
DecompositionSeriesDataArray.update_forward_refs()
DecompositionSeriesMetric.update_forward_refs()
DecompositionSeriesBandsBandLimitsArray.update_forward_refs()
DecompositionSeriesBandsBandMeanArray.update_forward_refs()
DecompositionSeriesBandsBandStdevArray.update_forward_refs()
UnitsObsIntervalsArray.update_forward_refs()
UnitsWaveformMeanArray.update_forward_refs()
UnitsWaveformSdArray.update_forward_refs()
UnitsWaveformsArray.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
DecompositionSeriesBandsBandName.update_forward_refs()
DecompositionSeriesBandsBandLimits.update_forward_refs()
DecompositionSeriesBandsBandMean.update_forward_refs()
DecompositionSeriesBandsBandStdev.update_forward_refs()
UnitsSpikeTimes.update_forward_refs()
UnitsObsIntervals.update_forward_refs()
UnitsElectrodeGroup.update_forward_refs()
UnitsWaveformMean.update_forward_refs()
UnitsWaveformSd.update_forward_refs()
UnitsWaveforms.update_forward_refs()
VectorIndex.update_forward_refs()
UnitsSpikeTimesIndex.update_forward_refs()
UnitsObsIntervalsIndex.update_forward_refs()
UnitsElectrodesIndex.update_forward_refs()
UnitsWaveformsIndex.update_forward_refs()
UnitsWaveformsIndexIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DecompositionSeriesSourceChannels.update_forward_refs()
UnitsElectrodes.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
DecompositionSeriesBands.update_forward_refs()
Units.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
NWBData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
Image.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferences.update_forward_refs()
ImageReferencesArray.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
BehavioralEpochs.update_forward_refs()
BehavioralEvents.update_forward_refs()
BehavioralTimeSeries.update_forward_refs()
PupilTracking.update_forward_refs()
EyeTracking.update_forward_refs()
CompassDirection.update_forward_refs()
Position.update_forward_refs()
TimeSeries.update_forward_refs()
SpatialSeries.update_forward_refs()
AbstractFeatureSeries.update_forward_refs()
AnnotationSeries.update_forward_refs()
IntervalSeries.update_forward_refs()
DecompositionSeries.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()

View file

@ -0,0 +1,420 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the unit specified by 'unit', e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
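# A minimal sketch (hypothetical sizes, not part of the generated models) of
# the image shapes described in the Image docstring: 2-D grayscale, or 3-D
# with three (r, g, b) or four (r, g, b, a) elements in the last dimension.
example_gray_shape = (480, 640)      # (x, y)
example_rgb_shape = (480, 640, 3)    # (x, y, (r, g, b))
example_rgba_shape = (480, 640, 4)   # (x, y, (r, g, b, a))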
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
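# A minimal sketch (plain lists standing in for VectorData/VectorIndex
# contents, not part of the generated models) of the ragged-array indexing
# described in the VectorData/VectorIndex docstrings: row i spans
# VectorData[VectorIndex[i-1]:VectorIndex[i]], with row 0 starting at 0.
example_vector_data = ["a", "b", "c", "d", "e"]
example_vector_index = [2, 3, 5]  # cumulative end offsets, one per table row
example_rows = [
    example_vector_data[(example_vector_index[i - 1] if i else 0):end]
    for i, end in enumerate(example_vector_index)
]  # -> [["a", "b"], ["c"], ["d", "e"]]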
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
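# A minimal sketch (hypothetical row indices, not part of the generated
# models) of what a DynamicTableRegion expresses: 0-indexed rows pointing
# into a column of the target DynamicTable.
example_target_column = ["tetrode1", "tetrode2", "tetrode3"]
example_region_rows = [0, 0, 2]  # rows of the target table, 0-indexed
example_resolved = [example_target_column[i] for i in example_region_rows]
#   -> ["tetrode1", "tetrode1", "tetrode3"]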
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class Device(NWBContainer):
"""
Metadata about a data acquisition device, e.g., recording system, electrode, microscope.
"""
description: Optional[str] = Field(None, description="""Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.""")
manufacturer: Optional[str] = Field(None, description="""The name of the manufacturer of the device.""")
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
Device.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -0,0 +1,484 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the unit specified by 'unit', e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsStartTime(VectorData):
"""
Start time of epoch, in seconds.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsStopTime(VectorData):
"""
Stop time of epoch, in seconds.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsTags(VectorData):
"""
User-defined tags that identify or categorize events.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData):
"""
An index into a TimeSeries object.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class TimeIntervalsTagsIndex(VectorIndex):
"""
Index for tags.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class TimeIntervals(DynamicTable):
"""
A container for aggregating epoch data and the TimeSeries that each epoch applies to.
"""
start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""")
stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""")
tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""")
tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeIntervalsStartTime.update_forward_refs()
TimeIntervalsStopTime.update_forward_refs()
TimeIntervalsTags.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
TimeIntervalsTimeseries.update_forward_refs()
VectorIndex.update_forward_refs()
TimeIntervalsTagsIndex.update_forward_refs()
TimeIntervalsTimeseriesIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
TimeIntervals.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,656 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class ImageSeriesData(ConfiguredBaseModel):
"""
Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
"""
array: Optional[ImageSeriesDataArray] = Field(None)
class ImageSeriesDimension(ConfiguredBaseModel):
"""
Number of pixels on x, y, (and z) axes.
"""
array: Optional[ImageSeriesDimensionArray] = Field(None)
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
"""
starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""")
array: Optional[ImageSeriesExternalFileArray] = Field(None)
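# Illustrative sketch, not part of the generated model code: how 'starting_frame' can be
# derived from hypothetical per-file frame counts, mirroring the docstring's example above
# ([5, 10, 20] frames per file -> starting_frame [0, 5, 15]). The helper name and default
# values are assumptions for illustration only.
def _example_starting_frame(frame_counts=(5, 10, 20)):
    starting_frame = [0]
    for count in frame_counts[:-1]:
        # each file starts at the cumulative number of frames stored in the files before it
        starting_frame.append(starting_frame[-1] + count)
    return starting_frame  # [0, 5, 15] for the default counts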
class ImageSeriesFormat(ConfiguredBaseModel):
"""
Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.
"""
None
class OpticalSeriesDistance(ConfiguredBaseModel):
"""
Distance from camera/monitor to target/eye.
"""
None
class OpticalSeriesFieldOfView(ConfiguredBaseModel):
"""
Width, height and depth of image, or imaged area, in meters.
"""
array: Optional[OpticalSeriesFieldOfViewArray] = Field(None)
class OpticalSeriesData(ConfiguredBaseModel):
"""
Images presented to subject, either grayscale or RGB
"""
array: Optional[OpticalSeriesDataArray] = Field(None)
class OpticalSeriesOrientation(ConfiguredBaseModel):
"""
Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.
"""
None
class IndexSeriesData(ConfiguredBaseModel):
"""
Index of the image (using zero-indexing) in the linked Images object.
"""
conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""")
array: Optional[IndexSeriesDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
- Each slot within a subclass indicates a possible dimension.
- Only dimensions that are present in all the dimension specifiers in the original schema are required.
- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
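# Illustrative sketch, not part of the generated model code: each slot of an Arraylike
# subclass names a candidate dimension of the underlying array; slots present in every
# shape specifier of the source NWB schema are required, the rest are optional. For
# example, ImageSeriesDataArray below requires frame/x/y and allows an optional z, i.e.
# the data may be shaped [frame, x, y] or [frame, x, y, z]. The helper below is an
# assumption for illustration only.
def _example_arraylike_dims():
    required = ["frame", "x", "y"]  # dimensions present in every shape specifier
    optional = ["z"]                # dimension present only in the 4-D specifier
    return required, optional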
class GrayscaleImageArray(Arraylike):
x: Optional[float] = Field(None)
y: Optional[float] = Field(None)
class RGBImageArray(Arraylike):
x: Optional[float] = Field(None)
y: Optional[float] = Field(None)
r_g_b: Optional[float] = Field(None)
class RGBAImageArray(Arraylike):
x: Optional[float] = Field(None)
y: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageSeriesDataArray(Arraylike):
frame: float = Field(...)
x: float = Field(...)
y: float = Field(...)
z: Optional[float] = Field(None)
class ImageSeriesDimensionArray(Arraylike):
rank: int = Field(...)
class ImageSeriesExternalFileArray(Arraylike):
num_files: str = Field(...)
class OpticalSeriesFieldOfViewArray(Arraylike):
width_height: Optional[float] = Field(None)
width_height_depth: Optional[float] = Field(None)
class OpticalSeriesDataArray(Arraylike):
frame: float = Field(...)
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
class IndexSeriesDataArray(Arraylike):
num_times: int = Field(...)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
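# Illustrative sketch, not part of the generated model code: converting raw stored values
# to the declared 'unit', following the attribute docstrings above
# (value_in_unit = raw * conversion + offset). The default conversion is the docstring's
# worked example (int16 spanning a 5 V range with 8000x gain, 2.5 / 32768 / 8000); the
# helper name and sample raw value are assumptions for illustration only.
def _example_timeseries_to_unit(raw_value=16384, conversion=2.5 / 32768 / 8000, offset=0.0):
    return raw_value * conversion + offset  # value in the declared 'unit' (volts here)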
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
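# Illustrative sketch, not part of the generated model code: when samples are uniformly
# spaced, timestamps can be reconstructed from 'starting_time' and 'rate' as
# t_i = starting_time + i / rate (seconds), per the docstring above. The helper name and
# default values are assumptions for illustration only.
def _example_uniform_timestamps(starting_time=0.0, rate=30.0, num_samples=5):
    return [starting_time + i / rate for i in range(num_samples)]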
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class GrayscaleImage(Image):
"""
A grayscale image.
"""
array: Optional[GrayscaleImageArray] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBImage(Image):
"""
A color image.
"""
array: Optional[RGBImageArray] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBAImage(Image):
"""
A color image with transparency.
"""
array: Optional[RGBAImageArray] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
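# Illustrative sketch, not part of the generated model code: how a VectorIndex slices its
# target VectorData into ragged rows, per the docstrings above (row 0 is data[0:index[0]],
# row 1 is data[index[0]:index[1]], and so on). The data and index values are hypothetical.
def _example_ragged_rows(data=("a", "b", "c", "d", "e"), index=(2, 3, 5)):
    rows, start = [], 0
    for end in index:
        rows.append(list(data[start:end]))
        start = end
    return rows  # [['a', 'b'], ['c'], ['d', 'e']]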
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
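# Illustrative sketch, not part of the generated model code: a DynamicTableRegion stores
# 0-indexed row numbers into another DynamicTable, so repeated metadata can be shared by
# reference rather than duplicated. The row numbers and target rows are hypothetical.
def _example_region_lookup(region_rows=(0, 0, 2), target_rows=("site A", "site B", "site C")):
    return [target_rows[i] for i in region_rows]  # ['site A', 'site A', 'site C']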
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
None
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ImageSeries(TimeSeries):
"""
General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z].
"""
data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ImageMaskSeries(ImageSeries):
"""
An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed.
"""
data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class OpticalSeries(ImageSeries):
"""
Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or what the area of the target being imaged is). If the OpticalSeries represents acquired imaging data, orientation is also important.
"""
distance: Optional[OpticalSeriesDistance] = Field(None, description="""Distance from camera/monitor to target/eye.""")
field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[OpticalSeriesOrientation] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""")
dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
"""
data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
ImageSeriesData.update_forward_refs()
ImageSeriesDimension.update_forward_refs()
ImageSeriesExternalFile.update_forward_refs()
ImageSeriesFormat.update_forward_refs()
OpticalSeriesDistance.update_forward_refs()
OpticalSeriesFieldOfView.update_forward_refs()
OpticalSeriesData.update_forward_refs()
OpticalSeriesOrientation.update_forward_refs()
IndexSeriesData.update_forward_refs()
Arraylike.update_forward_refs()
GrayscaleImageArray.update_forward_refs()
RGBImageArray.update_forward_refs()
RGBAImageArray.update_forward_refs()
ImageSeriesDataArray.update_forward_refs()
ImageSeriesDimensionArray.update_forward_refs()
ImageSeriesExternalFileArray.update_forward_refs()
OpticalSeriesFieldOfViewArray.update_forward_refs()
OpticalSeriesDataArray.update_forward_refs()
IndexSeriesDataArray.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
GrayscaleImage.update_forward_refs()
RGBImage.update_forward_refs()
RGBAImage.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
ImageSeries.update_forward_refs()
ImageMaskSeries.update_forward_refs()
OpticalSeries.update_forward_refs()
IndexSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -0,0 +1,831 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class AbstractFeatureSeriesData(ConfiguredBaseModel):
"""
Values of each feature at each time.
"""
unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""")
array: Optional[AbstractFeatureSeriesDataArray] = Field(None)
class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel):
"""
Units of each feature.
"""
array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None)
class AbstractFeatureSeriesFeatures(ConfiguredBaseModel):
"""
Description of the features represented in TimeSeries::data.
"""
array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None)
class AnnotationSeriesData(ConfiguredBaseModel):
"""
Annotations made during an experiment.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[AnnotationSeriesDataArray] = Field(None)
class IntervalSeriesData(ConfiguredBaseModel):
"""
Use values >0 if interval started, <0 if interval ended.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[IntervalSeriesDataArray] = Field(None)
class DecompositionSeriesData(ConfiguredBaseModel):
"""
Data decomposed into frequency bands.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""")
array: Optional[DecompositionSeriesDataArray] = Field(None)
class DecompositionSeriesMetric(ConfiguredBaseModel):
"""
The metric used, e.g. phase, amplitude, power.
"""
None
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default.
- Each slot within a subclass indicates a possible dimension.
- Only dimensions that are present in all the dimension specifiers in the original schema are required.
- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class AbstractFeatureSeriesDataArray(Arraylike):
num_times: float = Field(...)
num_features: Optional[float] = Field(None)
class AbstractFeatureSeriesFeatureUnitsArray(Arraylike):
num_features: str = Field(...)
class AbstractFeatureSeriesFeaturesArray(Arraylike):
num_features: str = Field(...)
class AnnotationSeriesDataArray(Arraylike):
num_times: str = Field(...)
class IntervalSeriesDataArray(Arraylike):
num_times: int = Field(...)
class DecompositionSeriesDataArray(Arraylike):
num_times: Optional[float] = Field(None)
num_channels: Optional[float] = Field(None)
num_bands: Optional[float] = Field(None)
class DecompositionSeriesBandsBandLimitsArray(Arraylike):
num_bands: Optional[float] = Field(None)
low_high: Optional[float] = Field(None)
class DecompositionSeriesBandsBandMeanArray(Arraylike):
num_bands: float = Field(...)
class DecompositionSeriesBandsBandStdevArray(Arraylike):
num_bands: float = Field(...)
class UnitsObsIntervalsArray(Arraylike):
num_intervals: Optional[float] = Field(None)
start_end: Optional[float] = Field(None)  # NOTE: the schema dimension is named 'start|end', which is not a valid Python identifier; renamed here
class UnitsWaveformMeanArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformSdArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformsArray(Arraylike):
num_waveforms: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandName(VectorData):
"""
Name of the band, e.g. theta.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandLimits(VectorData):
"""
Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.
"""
array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandMean(VectorData):
"""
The mean Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandStdev(VectorData):
"""
The standard deviation of Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimes(VectorData):
"""
Spike times for each unit in seconds.
"""
resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsObsIntervals(VectorData):
"""
Observation intervals for each unit.
"""
array: Optional[UnitsObsIntervalsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodeGroup(VectorData):
"""
Electrode group that each spike unit came from.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsWaveformMean(VectorData):
"""
Spike waveform mean for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformSd(VectorData):
"""
Spike waveform standard deviation for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformSdArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveforms(VectorData):
"""
Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
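# Illustrative sketch, not part of the generated model code: resolving the doubly ragged
# 'waveforms' column described above, using the docstring's example values
# waveforms_index_index = [2, 5, 6] and waveforms_index = [3, 6, 8, 10, 12, 13]. The helper
# returns the 'waveforms' row numbers belonging to one unit and is an assumption for
# illustration only.
def _example_unit_waveform_rows(unit=0, index_index=(2, 5, 6), index=(3, 6, 8, 10, 12, 13)):
    ev_start = 0 if unit == 0 else index_index[unit - 1]   # first spike event of this unit
    ev_end = index_index[unit]                              # one past the last spike event
    row_start = 0 if ev_start == 0 else index[ev_start - 1]
    return list(range(row_start, index[ev_end - 1]))        # e.g. rows 0-5 for unit 0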
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimesIndex(VectorIndex):
"""
Index into the spike_times dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsObsIntervalsIndex(VectorIndex):
"""
Index into the obs_intervals dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodesIndex(VectorIndex):
"""
Index into electrodes.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndex(VectorIndex):
"""
Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndexIndex(VectorIndex):
"""
Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
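# Illustrative example (comments only): a DynamicTableRegion whose data is
# [0, 0, 2] points rows 0, 1, and 2 of this table at rows 0, 0, and 2 of the
# linked table, so repeated metadata is stored once and referenced by index.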
class DecompositionSeriesSourceChannels(DynamicTableRegion):
"""
DynamicTableRegion pointer to the channels that this decomposition series was generated from.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class UnitsElectrodes(DynamicTableRegion):
"""
Electrode that each spike unit came from, specified using a DynamicTableRegion.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
pass
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DecompositionSeriesBands(DynamicTable):
"""
Table for describing the bands that this series was generated from. There should be one row in this table for each band.
"""
band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""")
band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""")
band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""")
band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class Units(DynamicTable):
"""
Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
"""
spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""")
waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
pass
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
pass
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
pass
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AbstractFeatureSeries(TimeSeries):
"""
Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
"""
data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""")
feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""")
features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AnnotationSeries(TimeSeries):
"""
Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
"""
data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
"""
data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
"""
data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""")
metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""")
bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
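# Worked example (comments only) of the conversion described above: for signed
# 16-bit samples covering a -2.5V to 2.5V range through an 8000x gain,
#   conversion = 2.5 / 32768 / 8000      # ~9.5367e-9 volts per raw count
#   value_in_unit = raw_value * conversion + offset
# with offset treated as 0 when no additional shift is needed.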
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
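# Illustrative example (comments only): with uniformly spaced samples, the i-th
# timestamp follows from starting_time and rate:
#   t_i = starting_time + i / rate       # seconds, rate in Hz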
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
pass
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
AbstractFeatureSeriesData.update_forward_refs()
AbstractFeatureSeriesFeatureUnits.update_forward_refs()
AbstractFeatureSeriesFeatures.update_forward_refs()
AnnotationSeriesData.update_forward_refs()
IntervalSeriesData.update_forward_refs()
DecompositionSeriesData.update_forward_refs()
DecompositionSeriesMetric.update_forward_refs()
Arraylike.update_forward_refs()
AbstractFeatureSeriesDataArray.update_forward_refs()
AbstractFeatureSeriesFeatureUnitsArray.update_forward_refs()
AbstractFeatureSeriesFeaturesArray.update_forward_refs()
AnnotationSeriesDataArray.update_forward_refs()
IntervalSeriesDataArray.update_forward_refs()
DecompositionSeriesDataArray.update_forward_refs()
DecompositionSeriesBandsBandLimitsArray.update_forward_refs()
DecompositionSeriesBandsBandMeanArray.update_forward_refs()
DecompositionSeriesBandsBandStdevArray.update_forward_refs()
UnitsObsIntervalsArray.update_forward_refs()
UnitsWaveformMeanArray.update_forward_refs()
UnitsWaveformSdArray.update_forward_refs()
UnitsWaveformsArray.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
DecompositionSeriesBandsBandName.update_forward_refs()
DecompositionSeriesBandsBandLimits.update_forward_refs()
DecompositionSeriesBandsBandMean.update_forward_refs()
DecompositionSeriesBandsBandStdev.update_forward_refs()
UnitsSpikeTimes.update_forward_refs()
UnitsObsIntervals.update_forward_refs()
UnitsElectrodeGroup.update_forward_refs()
UnitsWaveformMean.update_forward_refs()
UnitsWaveformSd.update_forward_refs()
UnitsWaveforms.update_forward_refs()
VectorIndex.update_forward_refs()
UnitsSpikeTimesIndex.update_forward_refs()
UnitsObsIntervalsIndex.update_forward_refs()
UnitsElectrodesIndex.update_forward_refs()
UnitsWaveformsIndex.update_forward_refs()
UnitsWaveformsIndexIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DecompositionSeriesSourceChannels.update_forward_refs()
UnitsElectrodes.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
DecompositionSeriesBands.update_forward_refs()
Units.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
NWBData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
Image.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferences.update_forward_refs()
ImageReferencesArray.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
AbstractFeatureSeries.update_forward_refs()
AnnotationSeries.update_forward_refs()
IntervalSeries.update_forward_refs()
DecompositionSeries.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()

View file

@ -0,0 +1,475 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
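# Illustrative usage (comments only; 'Example' is a hypothetical subclass): the
# settings above mean unknown fields are rejected and assignments are re-validated:
#
#     class Example(ConfiguredBaseModel):
#         value: int
#
#     Example(value=1)               # ok
#     Example(value=1, surplus=2)    # ValidationError, since extra = 'forbid'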
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
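# Illustrative usage (comments only): FlatDType is a str-valued Enum, so members
# can be looked up from the dtype names used in the NWB schema language:
#   FlatDType("int32") is FlatDType.int32
#   FlatDType.utf8.value == "utf8"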
class OptogeneticSeriesData(ConfiguredBaseModel):
"""
Applied power for optogenetic stimulus, in watts.
"""
unit: Optional[str] = Field(None, description="""Unit of measurement for data, which is fixed to 'watts'.""")
array: Optional[OptogeneticSeriesDataArray] = Field(None)
class OptogeneticStimulusSiteDescription(ConfiguredBaseModel):
"""
Description of stimulation site.
"""
pass
class OptogeneticStimulusSiteExcitationLambda(ConfiguredBaseModel):
"""
Excitation wavelength, in nm.
"""
pass
class OptogeneticStimulusSiteLocation(ConfiguredBaseModel):
"""
Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.
"""
pass
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
pass
class OptogeneticSeriesDataArray(Arraylike):
num_times: float = Field(...)
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
pass
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
pass
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
pass
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
pass
class NWBContainer(Container):
"""
An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
"""
pass
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
"""
description: OptogeneticStimulusSiteDescription = Field(..., description="""Description of stimulation site.""")
excitation_lambda: OptogeneticStimulusSiteExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""")
location: OptogeneticStimulusSiteLocation = Field(..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
class NWBDataInterface(NWBContainer):
"""
An abstract data type for a generic container storing collections of data, as opposed to metadata.
"""
pass
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class OptogeneticSeries(TimeSeries):
"""
An optogenetic stimulus.
"""
data: OptogeneticSeriesData = Field(..., description="""Applied power for optogenetic stimulus, in watts.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
OptogeneticSeriesData.update_forward_refs()
OptogeneticStimulusSiteDescription.update_forward_refs()
OptogeneticStimulusSiteExcitationLambda.update_forward_refs()
OptogeneticStimulusSiteLocation.update_forward_refs()
Arraylike.update_forward_refs()
OptogeneticSeriesDataArray.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
OptogeneticStimulusSite.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
OptogeneticSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
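# A minimal usage sketch (not part of the generated module): building a toy DynamicTable
# from the classes above. All values here are invented purely for illustration.
example_ids = DynamicTableId(array=DynamicTableIdArray(num_rows=3))
example_column = VectorData(
    description="an example column with one value per row",
    array=VectorDataArray(dim0=[1, 2, 3]),
)
example_table = DynamicTable(
    colnames="example_column",
    description="a toy table with three rows",
    id=example_ids,
    VectorData=[example_column],
)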

File diff suppressed because it is too large

@ -0,0 +1,272 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "1.8.0"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class CSRMatrixIndices(ConfiguredBaseModel):
"""
The column indices.
"""
array: Optional[CSRMatrixIndicesArray] = Field(None)
class CSRMatrixIndptr(ConfiguredBaseModel):
"""
The row index pointer.
"""
array: Optional[CSRMatrixIndptrArray] = Field(None)
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
array: Optional[CSRMatrixDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class CSRMatrixIndicesArray(Arraylike):
number_of_non_zero_values: int = Field(...)
class CSRMatrixIndptrArray(Arraylike):
number_of_rows_in_the_matrix_plus_1: int = Field(...)  # dim name in the source schema is 'number of rows in the matrix + 1', which is not a valid Python identifier
class CSRMatrixDataArray(Arraylike):
number_of_non_zero_values: Any = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
indices: CSRMatrixIndices = Field(..., description="""The column indices.""")
indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""")
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
CSRMatrixIndices.update_forward_refs()
CSRMatrixIndptr.update_forward_refs()
CSRMatrixData.update_forward_refs()
Arraylike.update_forward_refs()
CSRMatrixIndicesArray.update_forward_refs()
CSRMatrixIndptrArray.update_forward_refs()
CSRMatrixDataArray.update_forward_refs()
Data.update_forward_refs()
Container.update_forward_refs()
CSRMatrix.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
VectorData.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndex.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiers.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTable.update_forward_refs()
DynamicTableId.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
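# A minimal usage sketch (not part of the generated module): the ragged-array pattern from
# the VectorIndex docstring above, with invented values. Conceptually the index would store
# end offsets (here 2 and 4) for two units; these generated models only carry the array's
# shape metadata (num_rows), not the values themselves.
spike_times = VectorData(
    description="spike times for all units, concatenated",
    array=VectorDataArray(dim0=[0.1, 0.4, 0.9, 1.3]),
)
spike_times_index = VectorIndex(
    target=spike_times,
    description="end index into spike_times for each unit",
    array=VectorIndexArray(num_rows=2),
)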


@ -0,0 +1,170 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class CSRMatrixIndices(ConfiguredBaseModel):
"""
The column indices.
"""
array: Optional[CSRMatrixIndicesArray] = Field(None)
class CSRMatrixIndptr(ConfiguredBaseModel):
"""
The row index pointer.
"""
array: Optional[CSRMatrixIndptrArray] = Field(None)
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
array: Optional[CSRMatrixDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class CSRMatrixIndicesArray(Arraylike):
number_of_non_zero_values: int = Field(...)
class CSRMatrixIndptrArray(Arraylike):
number_of_rows_in_the_matrix_plus_1: int = Field(...)  # dim name in the source schema is 'number of rows in the matrix + 1', which is not a valid Python identifier
class CSRMatrixDataArray(Arraylike):
number_of_non_zero_values: Any = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
indices: CSRMatrixIndices = Field(..., description="""The column indices.""")
indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""")
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
CSRMatrixIndices.update_forward_refs()
CSRMatrixIndptr.update_forward_refs()
CSRMatrixData.update_forward_refs()
Arraylike.update_forward_refs()
CSRMatrixIndicesArray.update_forward_refs()
CSRMatrixIndptrArray.update_forward_refs()
CSRMatrixDataArray.update_forward_refs()
Data.update_forward_refs()
Container.update_forward_refs()
CSRMatrix.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
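# Illustration only (not part of the generated module): how the CSR layout described in the
# CSRMatrix docstring is read, using plain Python lists for a hypothetical 2x3 matrix
# [[0, 5, 0], [7, 0, 8]].
indptr = [0, 1, 3]
indices = [1, 0, 2]
data = [5, 7, 8]
row = 1
cols_in_row = indices[indptr[row]:indptr[row + 1]]  # -> [0, 2]
vals_in_row = data[indptr[row]:indptr[row + 1]]     # -> [7, 8]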


@ -0,0 +1,229 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
EnumData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
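# A minimal usage sketch (not part of the generated module): the EnumData pattern described
# above, with invented values. Integer entries in 'choices' index into the 'labels' VectorData.
labels = VectorData(
    description="the fixed set of possible trial outcomes",
    array=VectorDataArray(dim0=["hit", "miss"]),
)
choices = EnumData(
    description="per-trial outcome, stored as an index into labels",
    elements=labels,
    array=VectorDataArray(dim0=[0, 1, 1, 0]),
)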


@ -0,0 +1,214 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class HERDKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDFilesArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntitiesArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectsArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntityKeysArray(Arraylike):
num_rows: Any = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class HERDKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
array: Optional[HERDKeysArray] = Field(None)
class HERDFiles(Data):
"""
A table for storing object ids of files used in external resources.
"""
array: Optional[HERDFilesArray] = Field(None)
class HERDEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
array: Optional[HERDEntitiesArray] = Field(None)
class HERDObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
array: Optional[HERDObjectsArray] = Field(None)
class HERDObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
array: Optional[HERDObjectKeysArray] = Field(None)
class HERDEntityKeys(Data):
"""
A table for identifying which keys use which entity.
"""
array: Optional[HERDEntityKeysArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class HERD(Container):
"""
HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files.
"""
keys: HERDKeys = Field(..., description="""A table for storing user terms that are used to refer to external resources.""")
files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""")
entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""")
objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""")
object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""")
entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
HERDKeysArray.update_forward_refs()
HERDFilesArray.update_forward_refs()
HERDEntitiesArray.update_forward_refs()
HERDObjectsArray.update_forward_refs()
HERDObjectKeysArray.update_forward_refs()
HERDEntityKeysArray.update_forward_refs()
Data.update_forward_refs()
HERDKeys.update_forward_refs()
HERDFiles.update_forward_refs()
HERDEntities.update_forward_refs()
HERDObjects.update_forward_refs()
HERDObjectKeys.update_forward_refs()
HERDEntityKeys.update_forward_refs()
Container.update_forward_refs()
HERD.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
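# A minimal usage sketch (not part of the generated module): constructing an empty HERD
# container from the classes above. Row counts are invented; a real file would populate
# the six tables with actual external-resource references.
herd = HERD(
    keys=HERDKeys(array=HERDKeysArray(num_rows=0)),
    files=HERDFiles(array=HERDFilesArray(num_rows=0)),
    entities=HERDEntities(array=HERDEntitiesArray(num_rows=0)),
    objects=HERDObjects(array=HERDObjectsArray(num_rows=0)),
    object_keys=HERDObjectKeys(array=HERDObjectKeysArray(num_rows=0)),
    entity_keys=HERDEntityKeys(array=HERDEntityKeysArray(num_rows=0)),
)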


@ -0,0 +1,92 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()


@ -154,8 +154,14 @@ class GitRepo:
self.cleanup()
else:
if not self.check():
warnings.warn('Destination directory is not empty and does not pass checks for correctness! pass force to overwrite')
return
warnings.warn('Destination directory is not empty and does not pass checks for correctness! cleaning up')
self.cleanup()
else:
# already have it
return
elif self.temp_directory.exists():
# exists but empty
self.cleanup()
res = subprocess.run(['git', 'clone', str(self.namespace.repository), str(self.temp_directory)])
if res.returncode != 0:

nwb_linkml/plot.py Normal file (170 lines)

@ -0,0 +1,170 @@
"""
Various visualization routines, mostly to help development for now
"""
from typing import TYPE_CHECKING, Optional, List, TypedDict, Union
from rich import print
import random
from dash import Dash, html
import dash_cytoscape as cyto
cyto.load_extra_layouts()
from nwb_schema_language import Namespace, Group, Dataset
from nwb_linkml.io import load_nwb_core
if TYPE_CHECKING:
from nwb_linkml.adapters import NamespacesAdapter
# from nwb_schema_language.datamodel import Namespaces
class _CytoNode(TypedDict):
id: str
label: str
class _CytoEdge(TypedDict):
source: str
target: str
class CytoElement(TypedDict):
data: _CytoEdge | _CytoNode
classes: Optional[str]
class Node:
def __init__(self,
id: str,
label: str,
klass: str,
parent: Optional[str] = None):
self.id = id
self.label = label
self.parent = parent
self.klass = klass
def make(self) -> List[CytoElement]:
node = [
CytoElement(data= _CytoNode(id=self.id, label=self.label), classes=self.klass)
]
if self.parent:
edge = [
CytoElement(data=_CytoEdge(source=self.parent, target=self.id))
]
node += edge
return node
def make_node(element: Group | Dataset, parent=None, recurse:bool=True) -> List[Node]:
if element.neurodata_type_def is None:
if element.name is None:
if element.neurodata_type_inc is None:
name = 'anonymous'
else:
name = element.neurodata_type_inc
else:
name = element.name
id = name + '-' + str(random.randint(0,1000))
label = id
classname = str(type(element).__name__).lower() + '-child'
else:
id = element.neurodata_type_def
label = element.neurodata_type_def
classname = str(type(element).__name__).lower()
if parent is None:
parent = element.neurodata_type_inc
node = Node(
id=id,
label=label,
parent=parent,
klass=classname
)
nodes = [node]
if isinstance(element, Group) and recurse:
for group in element.groups:
nodes += make_node(group, parent=id)
for dataset in element.datasets:
nodes += make_node(dataset, parent=id)
return nodes
def make_graph(namespaces: 'NamespacesAdapter', recurse:bool=True) -> List[CytoElement]:
namespaces.populate_imports()
nodes = []
element: Namespace | Group | Dataset
print('walking graph')
i = 0
for element in namespaces.walk_types(namespaces, (Group, Dataset)):
if element.neurodata_type_def is None:
# skip child nodes at top level, we'll get them in recursion
continue
if any([element.neurodata_type_def == node.id for node in nodes]):
continue
nodes.extend(make_node(element, recurse=recurse))
print('making elements')
cytoelements = []
for node in nodes:
cytoelements += node.make()
print(cytoelements)
return cytoelements
def plot_dependency_graph(namespaces: 'NamespacesAdapter', recurse:bool=True) -> Dash:
graph = make_graph(namespaces, recurse=recurse)
app = Dash(__name__)
styles = [
{
'selector': 'node',
'style': {
'content': 'data(label)'
}
},
{
'selector': '.dataset',
'style': {
'background-color': 'red',
'shape': 'rectangle'
}
},
{
'selector': '.group',
'style': {
'background-color': 'blue',
'shape': 'rectangle'
}
},
{
'selector': '.dataset-child',
'style': {
'background-color': 'red'
}
},
{
'selector': '.group-child',
'style': {
'background-color': 'blue'
}
}
]
app.layout = html.Div([
cyto.Cytoscape(
id='nwb_graph',
elements = graph,
style={'width': '100%', 'height': '100vh'},
layout= {'name': 'klay', 'rankDir': 'LR'},
stylesheet=styles
)
])
return app
if __name__ == "__main__":
core = load_nwb_core()
app = plot_dependency_graph(core, recurse=True)
print('opening dash')
app.run(debug=True)


@ -0,0 +1,359 @@
name: core.nwb.base
id: core.nwb.base
imports:
- hdmf-common.base
- hdmf-common.table
- nwb.language
default_prefix: core.nwb.base/
classes:
NWBData:
name: NWBData
description: An abstract data type for a dataset.
is_a: Data
TimeSeriesReferenceVectorData:
name: TimeSeriesReferenceVectorData
description: Column storing references to a TimeSeries (rows). For each TimeSeries
this VectorData column stores the start_index and count to indicate the range
in time to be selected as well as an object reference to the TimeSeries.
is_a: VectorData
Image:
name: Image
description: An abstract data type for an image. Shape can be 2-D (x, y), or 3-D
where the third dimension can have three or four elements, e.g. (x, y, (r, g,
b)) or (x, y, (r, g, b, a)).
is_a: NWBData
attributes:
resolution:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
description:
name: description
description: Description of the image.
range: text
array:
name: array
range: Image_Array
Image_Array:
name: Image_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
ImageReferences:
name: ImageReferences
description: Ordered dataset of references to Image objects.
is_a: NWBData
attributes:
array:
name: array
range: ImageReferences_Array
ImageReferences_Array:
name: ImageReferences_Array
is_a: Arraylike
attributes:
num_images:
name: num_images
range: Image
required: true
NWBContainer:
name: NWBContainer
description: An abstract data type for a generic container storing collections
of data and metadata. Base type for all data and metadata containers.
is_a: Container
NWBDataInterface:
name: NWBDataInterface
description: An abstract data type for a generic container storing collections
of data, as opposed to metadata.
is_a: NWBContainer
TimeSeries:
name: TimeSeries
description: General purpose time series.
is_a: NWBDataInterface
attributes:
description:
name: description
description: Description of the time series.
range: text
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
range: text
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
dimension should always represent time. This can also be used to store binary
data (e.g., image frames). This can also be a link to data stored in an
external file.
multivalued: false
range: TimeSeries_data
required: true
starting_time:
name: starting_time
description: Timestamp of the first sample in seconds. When timestamps are
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
multivalued: false
range: TimeSeries_starting_time
required: false
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
the common experiment master-clock stored in NWBFile.timestamps_reference_time.
multivalued: false
range: TimeSeries_timestamps
required: false
control:
name: control
description: Numerical labels that apply to each time point in data for the
purpose of querying and slicing data by these values. If present, the length
of this array should be the same size as the first dimension of data.
multivalued: false
range: TimeSeries_control
required: false
control_description:
name: control_description
description: Description of each control value. Must be present if control
is present. If present, control_description[0] should describe time points
where control == 0.
multivalued: false
range: TimeSeries_control_description
required: false
sync:
name: sync
description: Lab-specific time and sync information as provided directly from
hardware devices and that is necessary for aligning all acquired time information
to a common timebase. The timestamp array stores time in the common timebase.
This group will usually only be populated in TimeSeries that are stored
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
multivalued: false
range: TimeSeries_sync
required: false
TimeSeries_data:
name: TimeSeries_data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension
should always represent time. This can also be used to store binary data (e.g.,
image frames). This can also be a link to data stored in an external file.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as signed 16-bit
integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
range: float32
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
its coercion to the specified 'unit'. Two common examples of this include
(a) data stored in an unsigned type that requires a shift after scaling
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
"instantaneous", or "step". For example, a voltage trace would be "continuous",
because samples are recorded from a continuous process. An array of lick
times would be "instantaneous", because the data represents distinct moments
in time. Times of image presentations would be "step" because the picture
remains the same until the next timepoint. This field is optional, but is
useful in providing information about the underlying data. It may inform
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
array:
name: array
range: TimeSeries_data_Array
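# Worked example (comment only, not part of the schema): for the acquisition system
# described above, conversion = 2.5 / 32768 / 8000 ~ 9.5367e-9 V per raw unit, so with
# offset = 0 a stored int16 sample of 16384 corresponds to about
# 16384 * 9.5367e-9 ~ 1.56e-4 V in the specified 'unit'.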
TimeSeries_data_Array:
name: TimeSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: AnyType
required: true
num_DIM2:
name: num_DIM2
range: AnyType
required: false
num_DIM3:
name: num_DIM3
range: AnyType
required: false
num_DIM4:
name: num_DIM4
range: AnyType
required: false
TimeSeries_starting_time:
name: TimeSeries_starting_time
description: Timestamp of the first sample in seconds. When timestamps are uniformly
spaced, the timestamp of the first sample can be specified and all subsequent
ones calculated from the sampling rate attribute.
attributes:
rate:
name: rate
description: Sampling rate, in Hz.
range: float32
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
range: text
TimeSeries_timestamps:
name: TimeSeries_timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
array:
name: array
range: TimeSeries_timestamps_Array
TimeSeries_timestamps_Array:
name: TimeSeries_timestamps_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: float64
required: true
TimeSeries_control:
name: TimeSeries_control
description: Numerical labels that apply to each time point in data for the purpose
of querying and slicing data by these values. If present, the length of this
array should be the same size as the first dimension of data.
attributes:
array:
name: array
range: TimeSeries_control_Array
TimeSeries_control_Array:
name: TimeSeries_control_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: uint8
required: true
TimeSeries_control_description:
name: TimeSeries_control_description
description: Description of each control value. Must be present if control is
present. If present, control_description[0] should describe time points where
control == 0.
attributes:
array:
name: array
range: TimeSeries_control_description_Array
TimeSeries_control_description_Array:
name: TimeSeries_control_description_Array
is_a: Arraylike
attributes:
num_control_values:
name: num_control_values
range: text
required: true
TimeSeries_sync:
name: TimeSeries_sync
description: Lab-specific time and sync information as provided directly from
hardware devices and that is necessary for aligning all acquired time information
to a common timebase. The timestamp array stores time in the common timebase.
This group will usually only be populated in TimeSeries that are stored external
to the NWB file, in files storing raw data. Once timestamp data is calculated,
the contents of 'sync' are mostly for archival purposes.
ProcessingModule:
name: ProcessingModule
description: A collection of processed data.
is_a: NWBContainer
attributes:
description:
name: description
description: Description of this collection of processed data.
range: text
NWBDataInterface:
name: NWBDataInterface
description: Data objects stored in this collection.
multivalued: true
range: NWBDataInterface
required: false
DynamicTable:
name: DynamicTable
description: Tables stored in this collection.
multivalued: true
range: DynamicTable
required: false
Images:
name: Images
description: A collection of images with an optional way to specify the order
of the images using the "order_of_images" dataset. An order must be specified
if the images are referenced by index, e.g., from an IndexSeries.
is_a: NWBDataInterface
attributes:
description:
name: description
description: Description of this collection of images.
range: text
Image:
name: Image
description: Images stored in this collection.
multivalued: true
range: Image
required: true
order_of_images:
name: order_of_images
description: Ordered dataset of references to Image objects stored in the
parent group. Each Image object in the Images group should be stored once
and only once, so the dataset should have the same length as the number
of images.
multivalued: false
range: Images_order_of_images
required: false
Images_order_of_images:
name: Images_order_of_images
description: Ordered dataset of references to Image objects stored in the parent
group. Each Image object in the Images group should be stored once and only
once, so the dataset should have the same length as the number of images.
is_a: ImageReferences


@ -0,0 +1,172 @@
name: core.nwb.behavior
id: core.nwb.behavior
imports:
- core.nwb.base
- core.nwb.misc
- nwb.language
default_prefix: core.nwb.behavior/
classes:
SpatialSeries:
name: SpatialSeries
description: 'Direction, e.g., of gaze or travel, or position. The TimeSeries::data
field is a 2D array storing position or direction relative to some reference
frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries
has a text dataset reference_frame that indicates the zero-position, or the
zero-axes for direction. For example, if representing gaze direction, ''straight-ahead''
might be a specific pixel on the monitor, or some other point in space. For
position data, the 0,0 point might be the top-left corner of an enclosure, as
viewed from the tracking camera. The unit of data will indicate how to interpret
SpatialSeries values.'
is_a: TimeSeries
attributes:
data:
name: data
description: 1-D or 2-D array storing position or direction relative to some
reference frame.
multivalued: false
range: SpatialSeries_data
required: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
multivalued: false
range: SpatialSeries_reference_frame
required: false
SpatialSeries_data:
name: SpatialSeries_data
description: 1-D or 2-D array storing position or direction relative to some reference
frame.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
array:
name: array
range: SpatialSeries_data_Array
SpatialSeries_data_Array:
name: SpatialSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
x:
name: x
range: numeric
required: false
minimum_cardinality: 1
maximum_cardinality: 1
x,y:
name: x,y
range: numeric
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x,y,z:
name: x,y,z
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
SpatialSeries_reference_frame:
name: SpatialSeries_reference_frame
description: Description defining what exactly 'straight-ahead' means.
BehavioralEpochs:
name: BehavioralEpochs
description: TimeSeries for storing behavioral epochs. The objective of this
and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries)
is to provide generic hooks for software tools/scripts. This allows a tool/script
to take the output one specific interface (e.g., UnitTimes) and plot that data
relative to another data modality (e.g., behavioral events) without having to
define all possible modalities in advance. Declaring one of these interfaces
means that one or more TimeSeries of the specified type is published. These
TimeSeries should reside in a group having the same name as the interface. For
example, if a BehavioralTimeSeries interface is declared, the module will have
one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'.
BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
IntervalSeries:
name: IntervalSeries
description: IntervalSeries object containing start and stop times of epochs.
multivalued: true
range: IntervalSeries
required: false
BehavioralEvents:
name: BehavioralEvents
description: TimeSeries for storing behavioral events. See description of <a href="#BehavioralEpochs">BehavioralEpochs</a>
for more details.
is_a: NWBDataInterface
attributes:
TimeSeries:
name: TimeSeries
description: TimeSeries object containing behavioral events.
multivalued: true
range: TimeSeries
required: false
BehavioralTimeSeries:
name: BehavioralTimeSeries
    description: TimeSeries for storing behavioral time series data. See description
of <a href="#BehavioralEpochs">BehavioralEpochs</a> for more details.
is_a: NWBDataInterface
attributes:
TimeSeries:
name: TimeSeries
description: TimeSeries object containing continuous behavioral data.
multivalued: true
range: TimeSeries
required: false
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
TimeSeries:
name: TimeSeries
description: TimeSeries object containing time series data on pupil size.
multivalued: true
range: TimeSeries
required: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
SpatialSeries:
name: SpatialSeries
description: SpatialSeries object containing data measuring direction of gaze.
multivalued: true
range: SpatialSeries
required: false
CompassDirection:
name: CompassDirection
description: With a CompassDirection interface, a module publishes a SpatialSeries
object representing a floating point value for theta. The SpatialSeries::reference_frame
field should indicate what direction corresponds to 0 and which is the direction
of rotation (this should be clockwise). The si_unit for the SpatialSeries should
be radians or degrees.
is_a: NWBDataInterface
attributes:
SpatialSeries:
name: SpatialSeries
description: SpatialSeries object containing direction of gaze travel.
multivalued: true
range: SpatialSeries
required: false
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
SpatialSeries:
name: SpatialSeries
description: SpatialSeries object containing position data.
multivalued: true
range: SpatialSeries
required: true
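
The `SpatialSeries_data_Array` class above encodes the allowed shapes purely through required flags and min/max cardinalities on its dimension slots (`num_times` required; `x`, `x,y`, `x,y,z` optional with fixed cardinality). A minimal sketch of reading those bounds back out with `linkml_runtime`, assuming the file above has been saved as `core.nwb.behavior.yaml` (the filename and the installed package are assumptions, not part of this commit):

```python
# Sketch only: inspect the dimension slots of a generated *_Array class.
from linkml_runtime.linkml_model import SchemaDefinition
from linkml_runtime.loaders import yaml_loader

# Loading the file as a SchemaDefinition does not resolve imports, so the
# referenced core.nwb.base / nwb.language files do not need to be present.
schema = yaml_loader.load("core.nwb.behavior.yaml", target_class=SchemaDefinition)
array_cls = schema.classes["SpatialSeries_data_Array"]

for name, slot in array_cls.attributes.items():
    print(
        name,
        "required" if slot.required else "optional",
        slot.minimum_cardinality,
        slot.maximum_cardinality,
    )
```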
@ -0,0 +1,22 @@
name: core.nwb.device
id: core.nwb.device
imports:
- core.nwb.base
- nwb.language
default_prefix: core.nwb.device/
classes:
Device:
name: Device
description: Metadata about a data acquisition device, e.g., recording system,
electrode, microscope.
is_a: NWBContainer
attributes:
description:
name: description
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
@ -0,0 +1,588 @@
name: core.nwb.ecephys
id: core.nwb.ecephys
imports:
- core.nwb.base
- hdmf-common.table
- nwb.language
default_prefix: core.nwb.ecephys/
classes:
ElectricalSeries:
name: ElectricalSeries
description: A time series of acquired voltage data from extracellular recordings.
The data field is an int or float array storing data in volts. The first dimension
should always represent time. The second dimension, if present, should represent
channels.
is_a: TimeSeries
attributes:
filtering:
name: filtering
description: Filtering applied to all channels of the data. For example, if
this ElectricalSeries represents high-pass-filtered data (also known as
AP Band), then this value could be "High-pass 4-pole Bessel filter at 500
Hz". If this ElectricalSeries represents low-pass-filtered LFP data and
the type of filter is unknown, then this value could be "Low-pass filter
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
data:
name: data
description: Recorded voltage data.
multivalued: false
range: ElectricalSeries_data
required: true
electrodes:
name: electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
multivalued: false
range: ElectricalSeries_electrodes
required: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
'data' dataset by these values along the channel axis (as indicated by axis
attribute) AND by the global conversion factor in the 'conversion' attribute
of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion
* channel_conversion. This approach allows for both global and per-channel
data conversion factors needed to support the storage of electrical recordings
as native values generated by data acquisition systems. If this dataset
is not present, then there is no channel-specific conversion factor, i.e.
it is 1 for all channels.
multivalued: false
range: ElectricalSeries_channel_conversion
required: false
ElectricalSeries_data:
name: ElectricalSeries_data
description: Recorded voltage data.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. This value
is fixed to 'volts'. Actual stored values are not necessarily stored in
these units. To access the data in these units, multiply 'data' by 'conversion',
followed by 'channel_conversion' (if present), and then add 'offset'.
range: text
array:
name: array
range: ElectricalSeries_data_Array
ElectricalSeries_data_Array:
name: ElectricalSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
num_samples:
name: num_samples
range: numeric
required: false
ElectricalSeries_electrodes:
name: ElectricalSeries_electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
ElectricalSeries_channel_conversion:
name: ElectricalSeries_channel_conversion
description: Channel-specific conversion factor. Multiply the data in the 'data'
dataset by these values along the channel axis (as indicated by axis attribute)
AND by the global conversion factor in the 'conversion' attribute of 'data'
to get the data values in Volts, i.e, data in Volts = data * data.conversion
* channel_conversion. This approach allows for both global and per-channel data
conversion factors needed to support the storage of electrical recordings as
native values generated by data acquisition systems. If this dataset is not
present, then there is no channel-specific conversion factor, i.e. it is 1 for
all channels.
attributes:
axis:
name: axis
description: The zero-indexed axis of the 'data' dataset that the channel-specific
conversion factor corresponds to. This value is fixed to 1.
range: int32
array:
name: array
range: ElectricalSeries_channel_conversion_Array
ElectricalSeries_channel_conversion_Array:
name: ElectricalSeries_channel_conversion_Array
is_a: Arraylike
attributes:
num_channels:
name: num_channels
range: float32
required: true
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
crossings). This may also be raw data, as reported by ephys hardware. If so,
the TimeSeries::description field should describe how events were detected.
All SpikeEventSeries should reside in a module (under EventWaveform interface)
even if the spikes were reported and stored by hardware. All events span the
same recording channels and store snapshots of equal duration. TimeSeries::data
array structure: [num events] [num channels] [num samples] (or [num events]
[num samples] for single electrode).'
is_a: ElectricalSeries
attributes:
data:
name: data
description: Spike waveforms.
multivalued: false
range: SpikeEventSeries_data
required: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
the common experiment master-clock stored in NWBFile.timestamps_reference_time.
Timestamps are required for the events. Unlike for TimeSeries, timestamps
are required for SpikeEventSeries and are thus re-specified here.
multivalued: false
range: SpikeEventSeries_timestamps
required: true
SpikeEventSeries_data:
name: SpikeEventSeries_data
description: Spike waveforms.
attributes:
unit:
name: unit
description: Unit of measurement for waveforms, which is fixed to 'volts'.
range: text
array:
name: array
range: SpikeEventSeries_data_Array
SpikeEventSeries_data_Array:
name: SpikeEventSeries_data_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: numeric
required: true
num_samples:
name: num_samples
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
SpikeEventSeries_timestamps:
name: SpikeEventSeries_timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
Timestamps are required for the events. Unlike for TimeSeries, timestamps are
required for SpikeEventSeries and are thus re-specified here.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
array:
name: array
range: SpikeEventSeries_timestamps_Array
SpikeEventSeries_timestamps_Array:
name: SpikeEventSeries_timestamps_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: float64
required: true
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
in a SpikeEventSeries or other source.
is_a: NWBDataInterface
attributes:
description:
name: description
description: Description of features (eg, ''PC1'') for each of the extracted
features.
multivalued: false
range: FeatureExtraction_description
required: true
features:
name: features
description: Multi-dimensional array of features extracted from each event.
multivalued: false
range: FeatureExtraction_features
required: true
times:
name: times
description: Times of events that features correspond to (can be a link).
multivalued: false
range: FeatureExtraction_times
required: true
electrodes:
name: electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
multivalued: false
range: FeatureExtraction_electrodes
required: true
FeatureExtraction_description:
name: FeatureExtraction_description
description: Description of features (eg, ''PC1'') for each of the extracted features.
attributes:
array:
name: array
range: FeatureExtraction_description_Array
FeatureExtraction_description_Array:
name: FeatureExtraction_description_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
required: true
FeatureExtraction_features:
name: FeatureExtraction_features
description: Multi-dimensional array of features extracted from each event.
attributes:
array:
name: array
range: FeatureExtraction_features_Array
FeatureExtraction_features_Array:
name: FeatureExtraction_features_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float32
required: false
num_channels:
name: num_channels
range: float32
required: false
num_features:
name: num_features
range: float32
required: false
FeatureExtraction_times:
name: FeatureExtraction_times
description: Times of events that features correspond to (can be a link).
attributes:
array:
name: array
range: FeatureExtraction_times_Array
FeatureExtraction_times_Array:
name: FeatureExtraction_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
required: true
FeatureExtraction_electrodes:
name: FeatureExtraction_electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
EventDetection:
name: EventDetection
description: Detected spike events from voltage trace(s).
is_a: NWBDataInterface
attributes:
detection_method:
name: detection_method
description: Description of how events were detected, such as voltage threshold,
or dV/dT threshold, as well as relevant values.
multivalued: false
range: EventDetection_detection_method
required: true
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
corresponding to time of event. ''description'' should define what is meant
by time of event (e.g., .25 ms before action potential peak, zero-crossing
time, etc). The index points to each event from the raw data.
multivalued: false
range: EventDetection_source_idx
required: true
times:
name: times
description: Timestamps of events, in seconds.
multivalued: false
range: EventDetection_times
required: true
EventDetection_detection_method:
name: EventDetection_detection_method
description: Description of how events were detected, such as voltage threshold,
or dV/dT threshold, as well as relevant values.
EventDetection_source_idx:
name: EventDetection_source_idx
description: Indices (zero-based) into source ElectricalSeries::data array corresponding
to time of event. ''description'' should define what is meant by time of event
(e.g., .25 ms before action potential peak, zero-crossing time, etc). The index
points to each event from the raw data.
attributes:
array:
name: array
range: EventDetection_source_idx_Array
EventDetection_source_idx_Array:
name: EventDetection_source_idx_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: int32
required: true
EventDetection_times:
name: EventDetection_times
description: Timestamps of events, in seconds.
attributes:
unit:
name: unit
description: Unit of measurement for event times, which is fixed to 'seconds'.
range: text
array:
name: array
range: EventDetection_times_Array
EventDetection_times_Array:
name: EventDetection_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
required: true
EventWaveform:
name: EventWaveform
description: Represents either the waveforms of detected events, as extracted
from a raw data trace in /acquisition, or the event waveforms that were stored
during experiment acquisition.
is_a: NWBDataInterface
attributes:
SpikeEventSeries:
name: SpikeEventSeries
description: SpikeEventSeries object(s) containing detected spike event waveforms.
multivalued: true
range: SpikeEventSeries
required: false
FilteredEphys:
name: FilteredEphys
description: Electrophysiology data from one or more channels that has been subjected
to filtering. Examples of filtered data include Theta and Gamma (LFP has its
own interface). FilteredEphys modules publish an ElectricalSeries for each filtered
channel or set of channels. The name of each ElectricalSeries is arbitrary but
should be informative. The source of the filtered data, whether this is from
analysis of another time series or as acquired by hardware, should be noted
in each's TimeSeries::description field. There is no assumed 1::1 correspondence
between filtered ephys signals and electrodes, as a single signal can apply
to many nearby electrodes, and one electrode may have different filtered (e.g.,
theta and/or gamma) signals represented. Filter properties should be noted in
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
ElectricalSeries:
name: ElectricalSeries
description: ElectricalSeries object(s) containing filtered electrophysiology
data.
multivalued: true
range: ElectricalSeries
required: true
LFP:
name: LFP
description: LFP data from one or more channels. The electrode map in each published
ElectricalSeries will identify which channels are providing LFP data. Filter
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
ElectricalSeries:
name: ElectricalSeries
description: ElectricalSeries object(s) containing LFP data for one or more
channels.
multivalued: true
range: ElectricalSeries
required: true
ElectrodeGroup:
name: ElectrodeGroup
description: A physical grouping of electrodes, e.g. a shank of an array.
is_a: NWBContainer
attributes:
description:
name: description
description: Description of this electrode group.
range: text
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
position:
name: position
description: stereotaxic or common framework coordinates
multivalued: false
range: ElectrodeGroup_position
required: false
ElectrodeGroup_position:
name: ElectrodeGroup_position
description: stereotaxic or common framework coordinates
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
of the different clusters. Ideally, the waveform analysis should be performed
on data that is only high-pass filtered. This is a separate module because it
is expected to require updating. For example, IMEC probes may require different
storage requirements to store/display mean waveforms, requiring a new interface
or an extension of this one.
is_a: NWBDataInterface
attributes:
waveform_filtering:
name: waveform_filtering
description: Filtering applied to data before generating mean/sd
multivalued: false
range: ClusterWaveforms_waveform_filtering
required: true
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
each wave as cluster numbers in the associated Clustering module (i.e, cluster
3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence
should be empty (e.g., zero- filled)
multivalued: false
range: ClusterWaveforms_waveform_mean
required: true
waveform_sd:
name: waveform_sd
description: Stdev of waveforms for each cluster, using the same indices as
in mean
multivalued: false
range: ClusterWaveforms_waveform_sd
required: true
ClusterWaveforms_waveform_filtering:
name: ClusterWaveforms_waveform_filtering
description: Filtering applied to data before generating mean/sd
ClusterWaveforms_waveform_mean:
name: ClusterWaveforms_waveform_mean
description: The mean waveform for each cluster, using the same indices for each
wave as cluster numbers in the associated Clustering module (i.e, cluster 3
is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should
be empty (e.g., zero- filled)
attributes:
array:
name: array
range: ClusterWaveforms_waveform_mean_Array
ClusterWaveforms_waveform_mean_Array:
name: ClusterWaveforms_waveform_mean_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
ClusterWaveforms_waveform_sd:
name: ClusterWaveforms_waveform_sd
description: Stdev of waveforms for each cluster, using the same indices as in
mean
attributes:
array:
name: array
range: ClusterWaveforms_waveform_sd_Array
ClusterWaveforms_waveform_sd_Array:
name: ClusterWaveforms_waveform_sd_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
Clustering:
name: Clustering
description: DEPRECATED Clustered spike data, whether from automatic clustering
tools (e.g., klustakwik) or as a result of manual sorting.
is_a: NWBDataInterface
attributes:
description:
name: description
description: Description of clusters or clustering, (e.g. cluster 0 is noise,
clusters curated using Klusters, etc)
multivalued: false
range: Clustering_description
required: true
num:
name: num
description: Cluster number of each event
multivalued: false
range: Clustering_num
required: true
peak_over_rms:
name: peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
multivalued: false
range: Clustering_peak_over_rms
required: true
times:
name: times
description: Times of clustered events, in seconds. This may be a link to
times field in associated FeatureExtraction module.
multivalued: false
range: Clustering_times
required: true
Clustering_description:
name: Clustering_description
description: Description of clusters or clustering, (e.g. cluster 0 is noise,
clusters curated using Klusters, etc)
Clustering_num:
name: Clustering_num
description: Cluster number of each event
attributes:
array:
name: array
range: Clustering_num_Array
Clustering_num_Array:
name: Clustering_num_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: int32
required: true
Clustering_peak_over_rms:
name: Clustering_peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
attributes:
array:
name: array
range: Clustering_peak_over_rms_Array
Clustering_peak_over_rms_Array:
name: Clustering_peak_over_rms_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: true
Clustering_times:
name: Clustering_times
description: Times of clustered events, in seconds. This may be a link to times
field in associated FeatureExtraction module.
attributes:
array:
name: array
range: Clustering_times_Array
Clustering_times_Array:
name: Clustering_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
required: true
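
For a rough sense of what these generated schemas can feed into, here is a hedged sketch of producing Pydantic models from the ecephys file with the stock `linkml` generator (the `gen-pydantic` CLI does the same thing). It assumes the `linkml` package is installed and that `core.nwb.ecephys.yaml` sits next to the schemas it imports (`core.nwb.base`, `hdmf-common.table`, `nwb.language`), since imports are resolved while the models are built:

```python
# Sketch only: generate Python/Pydantic classes from one generated schema file.
from linkml.generators.pydanticgen import PydanticGenerator

python_source = PydanticGenerator("core.nwb.ecephys.yaml").serialize()
with open("core_nwb_ecephys.py", "w") as f:
    f.write(python_source)
```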
@ -0,0 +1,74 @@
name: core.nwb.epoch
id: core.nwb.epoch
imports:
- hdmf-common.table
- core.nwb.base
- nwb.language
default_prefix: core.nwb.epoch/
classes:
TimeIntervals:
name: TimeIntervals
description: A container for aggregating epoch data and the TimeSeries that each
epoch applies to.
is_a: DynamicTable
attributes:
start_time:
name: start_time
description: Start time of epoch, in seconds.
multivalued: false
range: TimeIntervals_start_time
required: true
stop_time:
name: stop_time
description: Stop time of epoch, in seconds.
multivalued: false
range: TimeIntervals_stop_time
required: true
tags:
name: tags
description: User-defined tags that identify or categorize events.
multivalued: false
range: TimeIntervals_tags
required: false
tags_index:
name: tags_index
description: Index for tags.
multivalued: false
range: TimeIntervals_tags_index
required: false
timeseries:
name: timeseries
description: An index into a TimeSeries object.
multivalued: false
range: TimeIntervals_timeseries
required: false
timeseries_index:
name: timeseries_index
description: Index for timeseries.
multivalued: false
range: TimeIntervals_timeseries_index
required: false
TimeIntervals_start_time:
name: TimeIntervals_start_time
description: Start time of epoch, in seconds.
is_a: VectorData
TimeIntervals_stop_time:
name: TimeIntervals_stop_time
description: Stop time of epoch, in seconds.
is_a: VectorData
TimeIntervals_tags:
name: TimeIntervals_tags
description: User-defined tags that identify or categorize events.
is_a: VectorData
TimeIntervals_tags_index:
name: TimeIntervals_tags_index
description: Index for tags.
is_a: VectorIndex
TimeIntervals_timeseries:
name: TimeIntervals_timeseries
description: An index into a TimeSeries object.
is_a: TimeSeriesReferenceVectorData
TimeIntervals_timeseries_index:
name: TimeIntervals_timeseries_index
description: Index for timeseries.
is_a: VectorIndex
File diff suppressed because it is too large
@ -0,0 +1,704 @@
name: core.nwb.icephys
id: core.nwb.icephys
imports:
- core.nwb.base
- hdmf-common.table
- nwb.language
default_prefix: core.nwb.icephys/
classes:
PatchClampSeries:
name: PatchClampSeries
description: An abstract base class for patch-clamp data - stimulus or response,
current or voltage.
is_a: TimeSeries
attributes:
stimulus_description:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
data:
name: data
description: Recorded voltage or current.
multivalued: false
range: PatchClampSeries_data
required: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
multivalued: false
range: PatchClampSeries_gain
required: false
PatchClampSeries_data:
name: PatchClampSeries_data
description: Recorded voltage or current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
array:
name: array
range: PatchClampSeries_data_Array
PatchClampSeries_data_Array:
name: PatchClampSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
PatchClampSeries_gain:
name: PatchClampSeries_gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).
CurrentClampSeries:
name: CurrentClampSeries
description: Voltage data from an intracellular current-clamp recording. A corresponding
CurrentClampStimulusSeries (stored separately as a stimulus) is used to store
the current injected.
is_a: PatchClampSeries
attributes:
data:
name: data
description: Recorded voltage.
multivalued: false
range: CurrentClampSeries_data
required: true
bias_current:
name: bias_current
description: Bias current, in amps.
multivalued: false
range: CurrentClampSeries_bias_current
required: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms.
multivalued: false
range: CurrentClampSeries_bridge_balance
required: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads.
multivalued: false
range: CurrentClampSeries_capacitance_compensation
required: false
CurrentClampSeries_data:
name: CurrentClampSeries_data
description: Recorded voltage.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
CurrentClampSeries_bias_current:
name: CurrentClampSeries_bias_current
description: Bias current, in amps.
CurrentClampSeries_bridge_balance:
name: CurrentClampSeries_bridge_balance
description: Bridge balance, in ohms.
CurrentClampSeries_capacitance_compensation:
name: CurrentClampSeries_capacitance_compensation
description: Capacitance compensation, in farads.
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There
is no CurrentClampStimulusSeries associated with an IZero series because the
amplifier is disconnected and no stimulus can reach the cell.
is_a: CurrentClampSeries
attributes:
stimulus_description:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
range: text
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_bias_current
required: true
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_bridge_balance
required: true
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_capacitance_compensation
required: true
IZeroClampSeries_bias_current:
name: IZeroClampSeries_bias_current
description: Bias current, in amps, fixed to 0.0.
IZeroClampSeries_bridge_balance:
name: IZeroClampSeries_bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
IZeroClampSeries_capacitance_compensation:
name: IZeroClampSeries_capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
description: Stimulus current applied during current clamp recording.
is_a: PatchClampSeries
attributes:
data:
name: data
description: Stimulus current applied.
multivalued: false
range: CurrentClampStimulusSeries_data
required: true
CurrentClampStimulusSeries_data:
name: CurrentClampStimulusSeries_data
description: Stimulus current applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
VoltageClampStimulusSeries (stored separately as a stimulus) is used to store
the voltage injected.
is_a: PatchClampSeries
attributes:
data:
name: data
description: Recorded current.
multivalued: false
range: VoltageClampSeries_data
required: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
multivalued: false
range: VoltageClampSeries_capacitance_fast
required: false
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
multivalued: false
range: VoltageClampSeries_capacitance_slow
required: false
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
multivalued: false
range: VoltageClampSeries_resistance_comp_bandwidth
required: false
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
multivalued: false
range: VoltageClampSeries_resistance_comp_correction
required: false
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
multivalued: false
range: VoltageClampSeries_resistance_comp_prediction
required: false
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
multivalued: false
range: VoltageClampSeries_whole_cell_capacitance_comp
required: false
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
multivalued: false
range: VoltageClampSeries_whole_cell_series_resistance_comp
required: false
VoltageClampSeries_data:
name: VoltageClampSeries_data
description: Recorded current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries_capacitance_fast:
name: VoltageClampSeries_capacitance_fast
description: Fast capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
range: text
VoltageClampSeries_capacitance_slow:
name: VoltageClampSeries_capacitance_slow
description: Slow capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
range: text
VoltageClampSeries_resistance_comp_bandwidth:
name: VoltageClampSeries_resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
range: text
VoltageClampSeries_resistance_comp_correction:
name: VoltageClampSeries_resistance_comp_correction
description: Resistance compensation correction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
range: text
VoltageClampSeries_resistance_comp_prediction:
name: VoltageClampSeries_resistance_comp_prediction
description: Resistance compensation prediction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
range: text
VoltageClampSeries_whole_cell_capacitance_comp:
name: VoltageClampSeries_whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
range: text
VoltageClampSeries_whole_cell_series_resistance_comp:
name: VoltageClampSeries_whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
range: text
VoltageClampStimulusSeries:
name: VoltageClampStimulusSeries
description: Stimulus voltage applied during a voltage clamp recording.
is_a: PatchClampSeries
attributes:
data:
name: data
description: Stimulus voltage applied.
multivalued: false
range: VoltageClampStimulusSeries_data
required: true
VoltageClampStimulusSeries_data:
name: VoltageClampStimulusSeries_data
description: Stimulus voltage applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
is_a: NWBContainer
attributes:
cell_id:
name: cell_id
description: unique ID of the cell
multivalued: false
range: IntracellularElectrode_cell_id
required: false
description:
name: description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
multivalued: false
range: IntracellularElectrode_description
required: true
filtering:
name: filtering
description: Electrode specific filtering.
multivalued: false
range: IntracellularElectrode_filtering
required: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
multivalued: false
range: IntracellularElectrode_initial_access_resistance
required: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: IntracellularElectrode_location
required: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
multivalued: false
range: IntracellularElectrode_resistance
required: false
seal:
name: seal
description: Information about seal used for recording.
multivalued: false
range: IntracellularElectrode_seal
required: false
slice:
name: slice
description: Information about slice used for recording.
multivalued: false
range: IntracellularElectrode_slice
required: false
IntracellularElectrode_cell_id:
name: IntracellularElectrode_cell_id
description: unique ID of the cell
IntracellularElectrode_description:
name: IntracellularElectrode_description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
IntracellularElectrode_filtering:
name: IntracellularElectrode_filtering
description: Electrode specific filtering.
IntracellularElectrode_initial_access_resistance:
name: IntracellularElectrode_initial_access_resistance
description: Initial access resistance.
IntracellularElectrode_location:
name: IntracellularElectrode_location
description: Location of the electrode. Specify the area, layer, comments on estimation
of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names
for anatomical regions when possible.
IntracellularElectrode_resistance:
name: IntracellularElectrode_resistance
description: Electrode resistance, in ohms.
IntracellularElectrode_seal:
name: IntracellularElectrode_seal
description: Information about seal used for recording.
IntracellularElectrode_slice:
name: IntracellularElectrode_slice
description: Information about slice used for recording.
SweepTable:
name: SweepTable
description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable
is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable
tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
is_a: DynamicTable
attributes:
sweep_number:
name: sweep_number
description: Sweep number of the PatchClampSeries in that row.
multivalued: false
range: SweepTable_sweep_number
required: true
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
multivalued: false
range: SweepTable_series
required: true
series_index:
name: series_index
description: Index for series.
multivalued: false
range: SweepTable_series_index
required: true
SweepTable_sweep_number:
name: SweepTable_sweep_number
description: Sweep number of the PatchClampSeries in that row.
is_a: VectorData
SweepTable_series:
name: SweepTable_series
description: The PatchClampSeries with the sweep number in that row.
is_a: VectorData
SweepTable_series_index:
name: SweepTable_series_index
description: Index for series.
is_a: VectorIndex
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
description: Table for storing intracellular electrode related metadata.
is_a: DynamicTable
attributes:
description:
name: description
description: Description of what is in this dynamic table.
range: text
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
multivalued: false
range: IntracellularElectrodesTable_electrode
required: true
IntracellularElectrodesTable_electrode:
name: IntracellularElectrodesTable_electrode
description: Column for storing the reference to the intracellular electrode.
is_a: VectorData
IntracellularStimuliTable:
name: IntracellularStimuliTable
description: Table for storing intracellular stimulus related metadata.
is_a: DynamicTable
attributes:
description:
name: description
description: Description of what is in this dynamic table.
range: text
stimulus:
name: stimulus
description: Column storing the reference to the recorded stimulus for the
recording (rows).
multivalued: false
range: IntracellularStimuliTable_stimulus
required: true
IntracellularStimuliTable_stimulus:
name: IntracellularStimuliTable_stimulus
description: Column storing the reference to the recorded stimulus for the recording
(rows).
is_a: TimeSeriesReferenceVectorData
IntracellularResponsesTable:
name: IntracellularResponsesTable
description: Table for storing intracellular response related metadata.
is_a: DynamicTable
attributes:
description:
name: description
description: Description of what is in this dynamic table.
range: text
response:
name: response
description: Column storing the reference to the recorded response for the
recording (rows)
multivalued: false
range: IntracellularResponsesTable_response
required: true
IntracellularResponsesTable_response:
name: IntracellularResponsesTable_response
description: Column storing the reference to the recorded response for the recording
(rows)
is_a: TimeSeriesReferenceVectorData
IntracellularRecordingsTable:
name: IntracellularRecordingsTable
description: A table to group together a stimulus and response from a single electrode
and a single simultaneous recording. Each row in the table represents a single
recording consisting typically of a stimulus and a corresponding response. In
some cases, however, only a stimulus or a response is recorded as part of an
experiment. In this case, both the stimulus and response will point to the same
TimeSeries while the idx_start and count of the invalid column will be set to
-1, thus, indicating that no values have been recorded for the stimulus or response,
respectively. Note, a recording MUST contain at least a stimulus or a response.
Typically the stimulus and response are PatchClampSeries. However, the use of
AD/DA channels that are not associated to an electrode is also common in intracellular
electrophysiology, in which case other TimeSeries may be used.
is_a: AlignedDynamicTable
attributes:
description:
name: description
description: Description of the contents of this table. Inherited from AlignedDynamicTable
and overwritten here to fix the value of the attribute.
range: text
electrodes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
multivalued: false
range: IntracellularRecordingsTable_electrodes
required: true
stimuli:
name: stimuli
description: Table for storing intracellular stimulus related metadata.
multivalued: false
range: IntracellularRecordingsTable_stimuli
required: true
responses:
name: responses
description: Table for storing intracellular response related metadata.
multivalued: false
range: IntracellularRecordingsTable_responses
required: true
IntracellularRecordingsTable_electrodes:
name: IntracellularRecordingsTable_electrodes
description: Table for storing intracellular electrode related metadata.
is_a: IntracellularElectrodesTable
IntracellularRecordingsTable_stimuli:
name: IntracellularRecordingsTable_stimuli
description: Table for storing intracellular stimulus related metadata.
is_a: IntracellularStimuliTable
IntracellularRecordingsTable_responses:
name: IntracellularRecordingsTable_responses
description: Table for storing intracellular response related metadata.
is_a: IntracellularResponsesTable
SimultaneousRecordingsTable:
name: SimultaneousRecordingsTable
description: A table for grouping different intracellular recordings from the
IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes.
is_a: DynamicTable
attributes:
recordings:
name: recordings
description: A reference to one or more rows in the IntracellularRecordingsTable
table.
multivalued: false
range: SimultaneousRecordingsTable_recordings
required: true
recordings_index:
name: recordings_index
description: Index dataset for the recordings column.
multivalued: false
range: SimultaneousRecordingsTable_recordings_index
required: true
SimultaneousRecordingsTable_recordings:
name: SimultaneousRecordingsTable_recordings
description: A reference to one or more rows in the IntracellularRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the IntracellularRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
SimultaneousRecordingsTable_recordings_index:
name: SimultaneousRecordingsTable_recordings_index
description: Index dataset for the recordings column.
is_a: VectorIndex
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
table together. This is typically used to group together sequential recordings
where a sequence of stimuli of the same type with varying parameters have been
presented in a sequence.
is_a: DynamicTable
attributes:
simultaneous_recordings:
name: simultaneous_recordings
description: A reference to one or more rows in the SimultaneousRecordingsTable
table.
multivalued: false
range: SequentialRecordingsTable_simultaneous_recordings
required: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
description: Index dataset for the simultaneous_recordings column.
multivalued: false
range: SequentialRecordingsTable_simultaneous_recordings_index
required: true
stimulus_type:
name: stimulus_type
description: The type of stimulus used for the sequential recording.
multivalued: false
range: SequentialRecordingsTable_stimulus_type
required: true
SequentialRecordingsTable_simultaneous_recordings:
name: SequentialRecordingsTable_simultaneous_recordings
description: A reference to one or more rows in the SimultaneousRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SimultaneousRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
SequentialRecordingsTable_simultaneous_recordings_index:
name: SequentialRecordingsTable_simultaneous_recordings_index
description: Index dataset for the simultaneous_recordings column.
is_a: VectorIndex
SequentialRecordingsTable_stimulus_type:
name: SequentialRecordingsTable_stimulus_type
description: The type of stimulus used for the sequential recording.
is_a: VectorData
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
together. With each SequentialRecording typically representing a particular
type of stimulus, the RepetitionsTable table is typically used to group sets
of stimuli applied in sequence.
is_a: DynamicTable
attributes:
sequential_recordings:
name: sequential_recordings
description: A reference to one or more rows in the SequentialRecordingsTable
table.
multivalued: false
range: RepetitionsTable_sequential_recordings
required: true
sequential_recordings_index:
name: sequential_recordings_index
description: Index dataset for the sequential_recordings column.
multivalued: false
range: RepetitionsTable_sequential_recordings_index
required: true
RepetitionsTable_sequential_recordings:
name: RepetitionsTable_sequential_recordings
description: A reference to one or more rows in the SequentialRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SequentialRecordingsTable table that this table
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
RepetitionsTable_sequential_recordings_index:
name: RepetitionsTable_sequential_recordings_index
description: Index dataset for the sequential_recordings column.
is_a: VectorIndex
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental condition.
is_a: DynamicTable
attributes:
repetitions:
name: repetitions
description: A reference to one or more rows in the RepetitionsTable table.
multivalued: false
range: ExperimentalConditionsTable_repetitions
required: true
repetitions_index:
name: repetitions_index
description: Index dataset for the repetitions column.
multivalued: false
range: ExperimentalConditionsTable_repetitions_index
required: true
ExperimentalConditionsTable_repetitions:
name: ExperimentalConditionsTable_repetitions
description: A reference to one or more rows in the RepetitionsTable table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the RepetitionsTable table that this table region
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
ExperimentalConditionsTable_repetitions_index:
name: ExperimentalConditionsTable_repetitions_index
description: Index dataset for the repetitions column.
is_a: VectorIndex
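
The `*_recordings`, `*_simultaneous_recordings`, `*_sequential_recordings`, and `*_repetitions` classes above all follow the same pattern: subclass `DynamicTableRegion` and pin the inherited `table` attribute to one specific table type. A small illustration of reading that refinement straight from the YAML (the filename and a PyYAML install are assumptions):

```python
# Sketch only: show how the generated region classes narrow the `table` range.
import yaml

with open("core.nwb.icephys.yaml") as f:
    schema = yaml.safe_load(f)

region = schema["classes"]["SimultaneousRecordingsTable_recordings"]
print(region["is_a"])                          # DynamicTableRegion
print(region["attributes"]["table"]["range"])  # IntracellularRecordingsTable
```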
@ -0,0 +1,353 @@
name: core.nwb.image
id: core.nwb.image
imports:
- core.nwb.base
- nwb.language
default_prefix: core.nwb.image/
classes:
GrayscaleImage:
name: GrayscaleImage
description: A grayscale image.
is_a: Image
attributes:
array:
name: array
range: GrayscaleImage_Array
GrayscaleImage_Array:
name: GrayscaleImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
RGBImage:
name: RGBImage
description: A color image.
is_a: Image
attributes:
array:
name: array
range: RGBImage_Array
RGBImage_Array:
name: RGBImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
RGBAImage:
name: RGBAImage
description: A color image with transparency.
is_a: Image
attributes:
array:
name: array
range: RGBAImage_Array
RGBAImage_Array:
name: RGBAImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
ImageSeries:
name: ImageSeries
description: General image data that is common between acquisition and stimulus
time series. Sometimes the image data is stored in the file in a raw format
while other times it will be stored as a series of external image files in the
host file system. The data field will either be binary data, if the data is
stored in the NWB file, or empty, if the data is stored in an external image
stack. [frame][x][y] or [frame][x][y][z].
is_a: TimeSeries
attributes:
data:
name: data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
multivalued: false
range: ImageSeries_data
required: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
multivalued: false
range: ImageSeries_dimension
required: false
external_file:
name: external_file
description: Paths to one or more external file(s). The field is only present
if format='external'. This is only relevant if the image series is stored
in the file system as one or more image file(s). This field should NOT be
used if the image is stored in another NWB file and that file is linked
to this file.
multivalued: false
range: ImageSeries_external_file
required: false
format:
name: format
description: Format of image. If this is 'external', then the attribute 'external_file'
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
multivalued: false
range: ImageSeries_format
required: false
ImageSeries_data:
name: ImageSeries_data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
attributes:
array:
name: array
range: ImageSeries_data_Array
ImageSeries_data_Array:
name: ImageSeries_data_Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
z:
name: z
range: numeric
required: false
ImageSeries_dimension:
name: ImageSeries_dimension
description: Number of pixels on x, y, (and z) axes.
attributes:
array:
name: array
range: ImageSeries_dimension_Array
ImageSeries_dimension_Array:
name: ImageSeries_dimension_Array
is_a: Arraylike
attributes:
rank:
name: rank
range: int32
required: true
ImageSeries_external_file:
name: ImageSeries_external_file
description: Paths to one or more external file(s). The field is only present
if format='external'. This is only relevant if the image series is stored in
the file system as one or more image file(s). This field should NOT be used
if the image is stored in another NWB file and that file is linked to this file.
attributes:
starting_frame:
name: starting_frame
description: Each external image may contain one or more consecutive frames
of the full ImageSeries. This attribute serves as an index to indicate which
frames each file contains, to facilitate random access. The 'starting_frame'
attribute, hence, contains a list of frame numbers within the full ImageSeries
of the first frame of each file listed in the parent 'external_file' dataset.
Zero-based indexing is used (hence, the first element will always be zero).
For example, if the 'external_file' dataset has three paths to files and
the first file has 5 frames, the second file has 10 frames, and the third
file has 20 frames, then this attribute will have values [0, 5, 15]. If
there is a single external file that holds all of the frames of the ImageSeries
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
array:
name: array
range: ImageSeries_external_file_Array
ImageSeries_external_file_Array:
name: ImageSeries_external_file_Array
is_a: Arraylike
attributes:
num_files:
name: num_files
range: text
required: true
ImageSeries_format:
name: ImageSeries_format
description: Format of image. If this is 'external', then the attribute 'external_file'
contains the path information to the image files. If this is 'raw', then the
raw (single-channel) binary data is stored in the 'data' dataset. If this attribute
is not present, then the default format='raw' case is assumed.
ImageMaskSeries:
name: ImageMaskSeries
description: An alpha mask that is applied to a presented visual stimulus. The
'data' array contains an array of mask values that are applied to the displayed
image. Mask values are stored as RGBA. Mask can vary with time. The timestamps
array indicates the starting time of a mask, and that mask pattern continues
until it's explicitly changed.
is_a: ImageSeries
OpticalSeries:
name: OpticalSeries
description: Image data that is presented or recorded. A stimulus template movie
will be stored only as an image. When the image is presented as stimulus, additional
data is required, such as field of view (e.g., how much of the visual field
the image covers, or how what is the area of the target being imaged). If the
OpticalSeries represents acquired imaging data, orientation is also important.
is_a: ImageSeries
attributes:
distance:
name: distance
description: Distance from camera/monitor to target/eye.
multivalued: false
range: OpticalSeries_distance
required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
multivalued: false
range: OpticalSeries_field_of_view
required: false
data:
name: data
description: Images presented to subject, either grayscale or RGB
multivalued: false
range: OpticalSeries_data
required: true
orientation:
name: orientation
description: Description of image relative to some reference frame (e.g.,
which way is up). Must also specify frame of reference.
multivalued: false
range: OpticalSeries_orientation
required: false
OpticalSeries_distance:
name: OpticalSeries_distance
description: Distance from camera/monitor to target/eye.
OpticalSeries_field_of_view:
name: OpticalSeries_field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: OpticalSeries_field_of_view_Array
OpticalSeries_field_of_view_Array:
name: OpticalSeries_field_of_view_Array
is_a: Arraylike
attributes:
width, height:
name: width, height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width, height, depth:
name: width, height, depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
OpticalSeries_data:
name: OpticalSeries_data
description: Images presented to subject, either grayscale or RGB
attributes:
array:
name: array
range: OpticalSeries_data_Array
OpticalSeries_data_Array:
name: OpticalSeries_data_Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
OpticalSeries_orientation:
name: OpticalSeries_orientation
description: Description of image relative to some reference frame (e.g., which
way is up). Must also specify frame of reference.
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
of the IndexSeries is to allow a static image stack to be stored in an Images
object, and the images in the stack to be referenced out-of-order. This can
be for the display of individual images, or of movie segments (as a movie is
simply a series of images). The data field stores the index of the frame in
the referenced Images object, and the timestamps array indicates when that image
was displayed.
is_a: TimeSeries
attributes:
data:
name: data
description: Index of the image (using zero-indexing) in the linked Images
object.
multivalued: false
range: IndexSeries_data
required: true
IndexSeries_data:
name: IndexSeries_data
description: Index of the image (using zero-indexing) in the linked Images object.
attributes:
conversion:
name: conversion
description: This field is unused by IndexSeries.
range: float32
resolution:
name: resolution
description: This field is unused by IndexSeries.
range: float32
offset:
name: offset
description: This field is unused by IndexSeries.
range: float32
unit:
name: unit
description: This field is unused by IndexSeries and has the value N/A.
range: text
array:
name: array
range: IndexSeries_data_Array
IndexSeries_data_Array:
name: IndexSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: uint32
required: true

@ -0,0 +1,614 @@
name: core.nwb.misc
id: core.nwb.misc
imports:
- core.nwb.base
- hdmf-common.table
- nwb.language
default_prefix: core.nwb.misc/
classes:
AbstractFeatureSeries:
name: AbstractFeatureSeries
description: Abstract features, such as quantitative descriptions of sensory stimuli.
The TimeSeries::data field is a 2D array, storing those features (e.g., for
visual grating stimulus this might be orientation, spatial frequency and contrast).
Null stimuli (eg, uniform gray) can be marked as being an independent feature
(eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values,
or through use of the TimeSeries::control fields. A set of features is considered
to persist until the next set of features is defined. The final set of features
stored should be the null set. This is useful when storing the raw stimulus
is impractical.
is_a: TimeSeries
attributes:
data:
name: data
description: Values of each feature at each time.
multivalued: false
range: AbstractFeatureSeries_data
required: true
feature_units:
name: feature_units
description: Units of each feature.
multivalued: false
range: AbstractFeatureSeries_feature_units
required: false
features:
name: features
description: Description of the features represented in TimeSeries::data.
multivalued: false
range: AbstractFeatureSeries_features
required: true
AbstractFeatureSeries_data:
name: AbstractFeatureSeries_data
description: Values of each feature at each time.
attributes:
unit:
name: unit
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
range: text
array:
name: array
range: AbstractFeatureSeries_data_Array
AbstractFeatureSeries_data_Array:
name: AbstractFeatureSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_features:
name: num_features
range: numeric
required: false
AbstractFeatureSeries_feature_units:
name: AbstractFeatureSeries_feature_units
description: Units of each feature.
attributes:
array:
name: array
range: AbstractFeatureSeries_feature_units_Array
AbstractFeatureSeries_feature_units_Array:
name: AbstractFeatureSeries_feature_units_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
required: true
AbstractFeatureSeries_features:
name: AbstractFeatureSeries_features
description: Description of the features represented in TimeSeries::data.
attributes:
array:
name: array
range: AbstractFeatureSeries_features_Array
AbstractFeatureSeries_features_Array:
name: AbstractFeatureSeries_features_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
required: true
AnnotationSeries:
name: AnnotationSeries
description: Stores user annotations made during an experiment. The data[] field
stores a text array, and timestamps are stored for each annotation (ie, interval=1).
This is largely an alias to a standard TimeSeries storing a text array but that
is identifiable as storing annotations in a machine-readable way.
is_a: TimeSeries
attributes:
data:
name: data
description: Annotations made during an experiment.
multivalued: false
range: AnnotationSeries_data
required: true
AnnotationSeries_data:
name: AnnotationSeries_data
description: Annotations made during an experiment.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
array:
name: array
range: AnnotationSeries_data_Array
AnnotationSeries_data_Array:
name: AnnotationSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: text
required: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
and end of intervals. The data field stores whether the interval just started
(>0 value) or ended (<0 value). Different interval types can be represented
in the same series by using multiple key values (eg, 1 for feature A, 2 for
feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This
is largely an alias of a standard TimeSeries but that is identifiable as representing
time intervals in a machine-readable way.
is_a: TimeSeries
attributes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
multivalued: false
range: IntervalSeries_data
required: true
IntervalSeries_data:
name: IntervalSeries_data
description: Use values >0 if interval started, <0 if interval ended.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
array:
name: array
range: IntervalSeries_data_Array
IntervalSeries_data_Array:
name: IntervalSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: int8
required: true
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
is_a: TimeSeries
attributes:
data:
name: data
description: Data decomposed into frequency bands.
multivalued: false
range: DecompositionSeries_data
required: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
multivalued: false
range: DecompositionSeries_metric
required: true
source_channels:
name: source_channels
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
multivalued: false
range: DecompositionSeries_source_channels
required: false
bands:
name: bands
description: Table for describing the bands that this series was generated
from. There should be one row in this table for each band.
multivalued: false
range: DecompositionSeries_bands
required: true
DecompositionSeries_data:
name: DecompositionSeries_data
description: Data decomposed into frequency bands.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
array:
name: array
range: DecompositionSeries_data_Array
DecompositionSeries_data_Array:
name: DecompositionSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: false
num_channels:
name: num_channels
range: numeric
required: false
num_bands:
name: num_bands
range: numeric
required: false
DecompositionSeries_metric:
name: DecompositionSeries_metric
description: The metric used, e.g. phase, amplitude, power.
DecompositionSeries_source_channels:
name: DecompositionSeries_source_channels
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
is_a: DynamicTableRegion
DecompositionSeries_bands:
name: DecompositionSeries_bands
description: Table for describing the bands that this series was generated from.
There should be one row in this table for each band.
is_a: DynamicTable
attributes:
band_name:
name: band_name
description: Name of the band, e.g. theta.
multivalued: false
range: DecompositionSeries_bands_band_name
required: true
band_limits:
name: band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
multivalued: false
range: DecompositionSeries_bands_band_limits
required: true
band_mean:
name: band_mean
description: The mean Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries_bands_band_mean
required: true
band_stdev:
name: band_stdev
description: The standard deviation of Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries_bands_band_stdev
required: true
DecompositionSeries_bands_band_name:
name: DecompositionSeries_bands_band_name
description: Name of the band, e.g. theta.
is_a: VectorData
DecompositionSeries_bands_band_limits:
name: DecompositionSeries_bands_band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_limits_Array
DecompositionSeries_bands_band_limits_Array:
name: DecompositionSeries_bands_band_limits_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
required: false
low, high:
name: low, high
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
DecompositionSeries_bands_band_mean:
name: DecompositionSeries_bands_band_mean
description: The mean Gaussian filters, in Hz.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_mean_Array
DecompositionSeries_bands_band_mean_Array:
name: DecompositionSeries_bands_band_mean_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
required: true
DecompositionSeries_bands_band_stdev:
name: DecompositionSeries_bands_band_stdev
description: The standard deviation of Gaussian filters, in Hz.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_stdev_Array
DecompositionSeries_bands_band_stdev_Array:
name: DecompositionSeries_bands_band_stdev_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
required: true
Units:
name: Units
description: Data about spiking units. Event times of observed units (e.g. cell,
synapse, etc.) should be concatenated and stored in spike_times.
is_a: DynamicTable
attributes:
spike_times_index:
name: spike_times_index
description: Index into the spike_times dataset.
multivalued: false
range: Units_spike_times_index
required: false
spike_times:
name: spike_times
description: Spike times for each unit in seconds.
multivalued: false
range: Units_spike_times
required: false
obs_intervals_index:
name: obs_intervals_index
description: Index into the obs_intervals dataset.
multivalued: false
range: Units_obs_intervals_index
required: false
obs_intervals:
name: obs_intervals
description: Observation intervals for each unit.
multivalued: false
range: Units_obs_intervals
required: false
electrodes_index:
name: electrodes_index
description: Index into electrodes.
multivalued: false
range: Units_electrodes_index
required: false
electrodes:
name: electrodes
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
multivalued: false
range: Units_electrodes
required: false
electrode_group:
name: electrode_group
description: Electrode group that each spike unit came from.
multivalued: false
range: Units_electrode_group
required: false
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
multivalued: false
range: Units_waveform_mean
required: false
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
multivalued: false
range: Units_waveform_sd
required: false
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
a doubly indexed column. The 'waveforms_index' column indexes which waveforms
in this column belong to the same spike event for a given unit, where each
waveform was recorded from a different electrode. The 'waveforms_index_index'
column indexes the 'waveforms_index' column to indicate which spike events
belong to a given unit. For example, if the 'waveforms_index_index' column
has values [2, 5, 6], then the first 2 elements of the 'waveforms_index'
column correspond to the 2 spike events of the first unit, the next 3 elements
of the 'waveforms_index' column correspond to the 3 spike events of the
second unit, and the next 1 element of the 'waveforms_index' column corresponds
to the 1 spike event of the third unit. If the 'waveforms_index' column
has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms'
column contain the 3 spike waveforms that were recorded from 3 different
electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
for a graphical representation of this example. When there is only one electrode
for each unit (i.e., each spike time is associated with a single waveform),
then the 'waveforms_index' column will have values 1, 2, ..., N, where N
is the number of spike events. The number of electrodes for each spike event
should be the same within a given unit. The 'electrodes' column should be
used to indicate which electrodes are associated with each unit, and the
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
multivalued: false
range: Units_waveforms
required: false
waveforms_index:
name: waveforms_index
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
multivalued: false
range: Units_waveforms_index
required: false
waveforms_index_index:
name: waveforms_index_index
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
multivalued: false
range: Units_waveforms_index_index
required: false
Units_spike_times_index:
name: Units_spike_times_index
description: Index into the spike_times dataset.
is_a: VectorIndex
Units_spike_times:
name: Units_spike_times
description: Spike times for each unit in seconds.
is_a: VectorData
attributes:
resolution:
name: resolution
description: The smallest possible difference between two spike times. Usually
1 divided by the acquisition sampling rate from which spike times were extracted,
but could be larger if the acquisition time series was downsampled or smaller
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
Units_obs_intervals_index:
name: Units_obs_intervals_index
description: Index into the obs_intervals dataset.
is_a: VectorIndex
Units_obs_intervals:
name: Units_obs_intervals
description: Observation intervals for each unit.
is_a: VectorData
attributes:
array:
name: array
range: Units_obs_intervals_Array
Units_obs_intervals_Array:
name: Units_obs_intervals_Array
is_a: Arraylike
attributes:
num_intervals:
name: num_intervals
range: float64
required: false
start|end:
name: start|end
range: float64
required: false
minimum_cardinality: 2
maximum_cardinality: 2
Units_electrodes_index:
name: Units_electrodes_index
description: Index into electrodes.
is_a: VectorIndex
Units_electrodes:
name: Units_electrodes
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
is_a: DynamicTableRegion
Units_electrode_group:
name: Units_electrode_group
description: Electrode group that each spike unit came from.
is_a: VectorData
Units_waveform_mean:
name: Units_waveform_mean
description: Spike waveform mean for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveform_mean_Array
Units_waveform_mean_Array:
name: Units_waveform_mean_Array
is_a: Arraylike
attributes:
num_units:
name: num_units
range: float32
required: true
num_samples:
name: num_samples
range: float32
required: true
num_electrodes:
name: num_electrodes
range: float32
required: false
Units_waveform_sd:
name: Units_waveform_sd
description: Spike waveform standard deviation for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveform_sd_Array
Units_waveform_sd_Array:
name: Units_waveform_sd_Array
is_a: Arraylike
attributes:
num_units:
name: num_units
range: float32
required: true
num_samples:
name: num_samples
range: float32
required: true
num_electrodes:
name: num_electrodes
range: float32
required: false
Units_waveforms:
name: Units_waveforms
description: Individual waveforms for each spike on each electrode. This is a
doubly indexed column. The 'waveforms_index' column indexes which waveforms
in this column belong to the same spike event for a given unit, where each waveform
was recorded from a different electrode. The 'waveforms_index_index' column
indexes the 'waveforms_index' column to indicate which spike events belong to
a given unit. For example, if the 'waveforms_index_index' column has values
[2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
column correspond to the 3 spike events of the second unit, and the next 1 element
of the 'waveforms_index' column corresponds to the 1 spike event of the third
unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
that were recorded from 3 different electrodes for the first spike time of the
first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
for a graphical representation of this example. When there is only one electrode
for each unit (i.e., each spike time is associated with a single waveform),
then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
the number of spike events. The number of electrodes for each spike event should
be the same within a given unit. The 'electrodes' column should be used to indicate
which electrodes are associated with each unit, and the order of the waveforms
within a given unit x spike event should be in the same order as the electrodes
referenced in the 'electrodes' column of this table. The number of samples for
each waveform must be the same.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveforms_Array
Units_waveforms_Array:
name: Units_waveforms_Array
is_a: Arraylike
attributes:
num_waveforms:
name: num_waveforms
range: numeric
required: false
num_samples:
name: num_samples
range: numeric
required: false
Units_waveforms_index:
name: Units_waveforms_index
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
is_a: VectorIndex
Units_waveforms_index_index:
name: Units_waveforms_index_index
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
is_a: VectorIndex

@ -0,0 +1,73 @@
name: core.nwb.ogen
id: core.nwb.ogen
imports:
- core.nwb.base
- nwb.language
default_prefix: core.nwb.ogen/
classes:
OptogeneticSeries:
name: OptogeneticSeries
description: An optogenetic stimulus.
is_a: TimeSeries
attributes:
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
multivalued: false
range: OptogeneticSeries_data
required: true
OptogeneticSeries_data:
name: OptogeneticSeries_data
description: Applied power for optogenetic stimulus, in watts.
attributes:
unit:
name: unit
description: Unit of measurement for data, which is fixed to 'watts'.
range: text
array:
name: array
range: OptogeneticSeries_data_Array
OptogeneticSeries_data_Array:
name: OptogeneticSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
is_a: NWBContainer
attributes:
description:
name: description
description: Description of stimulation site.
multivalued: false
range: OptogeneticStimulusSite_description
required: true
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
multivalued: false
range: OptogeneticStimulusSite_excitation_lambda
required: true
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: OptogeneticStimulusSite_location
required: true
OptogeneticStimulusSite_description:
name: OptogeneticStimulusSite_description
description: Description of stimulation site.
OptogeneticStimulusSite_excitation_lambda:
name: OptogeneticStimulusSite_excitation_lambda
description: Excitation wavelength, in nm.
OptogeneticStimulusSite_location:
name: OptogeneticStimulusSite_location
description: Location of the stimulation site. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard
atlas names for anatomical regions when possible.

@ -0,0 +1,571 @@
name: core.nwb.ophys
id: core.nwb.ophys
imports:
- core.nwb.image
- core.nwb.base
- hdmf-common.table
- nwb.language
default_prefix: core.nwb.ophys/
classes:
OnePhotonSeries:
name: OnePhotonSeries
description: Image stack recorded over time from 1-photon microscope.
is_a: ImageSeries
attributes:
pmt_gain:
name: pmt_gain
description: Photomultiplier gain.
range: float32
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
exposure_time:
name: exposure_time
description: Exposure time of the sample; often the inverse of the frequency.
range: float32
binning:
name: binning
description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
range: uint8
power:
name: power
description: Power of the excitation in mW, if known.
range: float32
intensity:
name: intensity
description: Intensity of the excitation in mW/mm^2, if known.
range: float32
TwoPhotonSeries:
name: TwoPhotonSeries
description: Image stack recorded over time from 2-photon microscope.
is_a: ImageSeries
attributes:
pmt_gain:
name: pmt_gain
description: Photomultiplier gain.
range: float32
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
multivalued: false
range: TwoPhotonSeries_field_of_view
required: false
TwoPhotonSeries_field_of_view:
name: TwoPhotonSeries_field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: TwoPhotonSeries_field_of_view_Array
TwoPhotonSeries_field_of_view_Array:
name: TwoPhotonSeries_field_of_view_Array
is_a: Arraylike
attributes:
width|height:
name: width|height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width|height|depth:
name: width|height|depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
RoiResponseSeries:
name: RoiResponseSeries
description: ROI responses over an imaging plane. The first dimension represents
time. The second dimension, if present, represents ROIs.
is_a: TimeSeries
attributes:
data:
name: data
description: Signals from ROIs.
multivalued: false
range: RoiResponseSeries_data
required: true
rois:
name: rois
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
multivalued: false
range: RoiResponseSeries_rois
required: true
RoiResponseSeries_data:
name: RoiResponseSeries_data
description: Signals from ROIs.
attributes:
array:
name: array
range: RoiResponseSeries_data_Array
RoiResponseSeries_data_Array:
name: RoiResponseSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_ROIs:
name: num_ROIs
range: numeric
required: false
RoiResponseSeries_rois:
name: RoiResponseSeries_rois
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
is_a: DynamicTableRegion
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
of dF/F should be the same as for segmentation (i.e., same names for ROIs and
for image planes).
is_a: NWBDataInterface
attributes:
RoiResponseSeries:
name: RoiResponseSeries
description: RoiResponseSeries object(s) containing dF/F for a ROI.
multivalued: true
range: RoiResponseSeries
required: true
Fluorescence:
name: Fluorescence
description: Fluorescence information about a region of interest (ROI). Storage
hierarchy of fluorescence should be the same as for segmentation (ie, same names
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
RoiResponseSeries:
name: RoiResponseSeries
description: RoiResponseSeries object(s) containing fluorescence data for
a ROI.
multivalued: true
range: RoiResponseSeries
required: true
ImageSegmentation:
name: ImageSegmentation
description: Stores pixels in an image that represent different regions of interest
(ROIs) or masks. All segmentation for a given imaging plane is stored together,
with storage for multiple imaging planes (masks) supported. Each ROI is stored
in its own subgroup, with the ROI group containing both a 2D mask and a list
of pixels that make up this mask. Segments can also be used for masking neuropil.
If segmentation is allowed to change with time, a new imaging plane (or module)
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
PlaneSegmentation:
name: PlaneSegmentation
description: Results from image segmentation of a specific imaging plane.
multivalued: true
range: PlaneSegmentation
required: true
PlaneSegmentation:
name: PlaneSegmentation
description: Results from image segmentation of a specific imaging plane.
is_a: DynamicTable
attributes:
image_mask:
name: image_mask
description: ROI masks for each ROI. Each image mask is the size of the original
imaging plane (or volume) and members of the ROI are finite non-zero.
multivalued: false
range: PlaneSegmentation_image_mask
required: false
pixel_mask_index:
name: pixel_mask_index
description: Index into pixel_mask.
multivalued: false
range: PlaneSegmentation_pixel_mask_index
required: false
pixel_mask:
name: pixel_mask
description: 'Pixel masks for each ROI: a list of indices and weights for
the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
multivalued: false
range: PlaneSegmentation_pixel_mask
required: false
voxel_mask_index:
name: voxel_mask_index
description: Index into voxel_mask.
multivalued: false
range: PlaneSegmentation_voxel_mask_index
required: false
voxel_mask:
name: voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for
the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
multivalued: false
range: PlaneSegmentation_voxel_mask
required: false
reference_images:
name: reference_images
description: Image stacks that the segmentation masks apply to.
multivalued: false
range: PlaneSegmentation_reference_images
required: true
PlaneSegmentation_image_mask:
name: PlaneSegmentation_image_mask
description: ROI masks for each ROI. Each image mask is the size of the original
imaging plane (or volume) and members of the ROI are finite non-zero.
is_a: VectorData
attributes:
array:
name: array
range: PlaneSegmentation_image_mask_Array
PlaneSegmentation_image_mask_Array:
name: PlaneSegmentation_image_mask_Array
is_a: Arraylike
attributes:
num_roi:
name: num_roi
range: AnyType
required: true
num_x:
name: num_x
range: AnyType
required: true
num_y:
name: num_y
range: AnyType
required: true
num_z:
name: num_z
range: AnyType
required: false
PlaneSegmentation_pixel_mask_index:
name: PlaneSegmentation_pixel_mask_index
description: Index into pixel_mask.
is_a: VectorIndex
PlaneSegmentation_pixel_mask:
name: PlaneSegmentation_pixel_mask
description: 'Pixel masks for each ROI: a list of indices and weights for the
ROI. Pixel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
is_a: VectorData
PlaneSegmentation_voxel_mask_index:
name: PlaneSegmentation_voxel_mask_index
description: Index into voxel_mask.
is_a: VectorIndex
PlaneSegmentation_voxel_mask:
name: PlaneSegmentation_voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
is_a: VectorData
PlaneSegmentation_reference_images:
name: PlaneSegmentation_reference_images
description: Image stacks that the segmentation masks apply to.
attributes:
ImageSeries:
name: ImageSeries
description: One or more image stacks that the masks apply to (can be one-element
stack).
multivalued: true
range: ImageSeries
required: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
description:
name: description
description: Description of the imaging plane.
multivalued: false
range: ImagingPlane_description
required: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
multivalued: false
range: ImagingPlane_excitation_lambda
required: true
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
multivalued: false
range: ImagingPlane_imaging_rate
required: false
indicator:
name: indicator
description: Calcium indicator.
multivalued: false
range: ImagingPlane_indicator
required: true
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: ImagingPlane_location
required: true
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
multivalued: false
range: ImagingPlane_manifold
required: false
origin_coords:
name: origin_coords
description: Physical location of the first element of the imaging plane (0,
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
multivalued: false
range: ImagingPlane_origin_coords
required: false
grid_spacing:
name: grid_spacing
description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
multivalued: false
range: ImagingPlane_grid_spacing
required: false
reference_frame:
name: reference_frame
description: Describes reference frame of origin_coords and grid_spacing.
For example, this can be a text description of the anatomical location and
orientation of the grid defined by origin_coords and grid_spacing or the
vectors needed to transform or rotate the grid to a common anatomical axis
(e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
grid_spacing. If origin_coords and grid_spacing are not present, then this
field is not required. For example, if the microscope takes 10 x 10 x 2
images, where the first value of the data matrix (index (0, 0, 0)) corresponds
to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
more anterior, larger numbers in y means more rightward, and larger numbers
in z means more ventral, then enter the following -- origin_coords = (-1.2,
-0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
are relative to bregma. First dimension corresponds to anterior-posterior
axis (larger index = more anterior). Second dimension corresponds to medial-lateral
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
multivalued: false
range: ImagingPlane_reference_frame
required: false
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
multivalued: true
range: OpticalChannel
required: true
ImagingPlane_description:
name: ImagingPlane_description
description: Description of the imaging plane.
ImagingPlane_excitation_lambda:
name: ImagingPlane_excitation_lambda
description: Excitation wavelength, in nm.
ImagingPlane_imaging_rate:
name: ImagingPlane_imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
ImagingPlane_indicator:
name: ImagingPlane_indicator
description: Calcium indicator.
ImagingPlane_location:
name: ImagingPlane_location
description: Location of the imaging plane. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard
atlas names for anatomical regions when possible.
ImagingPlane_manifold:
name: ImagingPlane_manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents the
position of the pixel relative to the defined coordinate space. Deprecated in
favor of origin_coords and grid_spacing.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as pixels from
x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
the 'conversion' multiplier to get from raw data acquisition pixel units
to meters is 2/1000.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_manifold_Array
ImagingPlane_manifold_Array:
name: ImagingPlane_manifold_Array
is_a: Arraylike
attributes:
height:
name: height
range: float32
required: true
width:
name: width
range: float32
required: true
x, y, z:
name: x, y, z
range: float32
required: true
minimum_cardinality: 3
maximum_cardinality: 3
depth:
name: depth
range: float32
required: false
ImagingPlane_origin_coords:
name: ImagingPlane_origin_coords
description: Physical location of the first element of the imaging plane (0, 0)
for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
physical location is relative to (e.g., bregma).
attributes:
unit:
name: unit
description: Measurement units for origin_coords. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_origin_coords_Array
ImagingPlane_origin_coords_Array:
name: ImagingPlane_origin_coords_Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
ImagingPlane_grid_spacing:
name: ImagingPlane_grid_spacing
description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
to interpret the grid.
attributes:
unit:
name: unit
description: Measurement units for grid_spacing. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_grid_spacing_Array
ImagingPlane_grid_spacing_Array:
name: ImagingPlane_grid_spacing_Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
ImagingPlane_reference_frame:
name: ImagingPlane_reference_frame
description: Describes reference frame of origin_coords and grid_spacing. For
example, this can be a text description of the anatomical location and orientation
of the grid defined by origin_coords and grid_spacing or the vectors needed
to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML).
This field is necessary to interpret origin_coords and grid_spacing. If origin_coords
and grid_spacing are not present, then this field is not required. For example,
if the microscope takes 10 x 10 x 2 images, where the first value of the data
matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma,
the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and
larger numbers in x means more anterior, larger numbers in y means more rightward,
and larger numbers in z means more ventral, then enter the following -- origin_coords
= (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin
coordinates are relative to bregma. First dimension corresponds to anterior-posterior
axis (larger index = more anterior). Second dimension corresponds to medial-lateral
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
is_a: NWBContainer
attributes:
description:
name: description
description: Description or other notes about the channel.
multivalued: false
range: OpticalChannel_description
required: true
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
multivalued: false
range: OpticalChannel_emission_lambda
required: true
OpticalChannel_description:
name: OpticalChannel_description
description: Description or other notes about the channel.
OpticalChannel_emission_lambda:
name: OpticalChannel_emission_lambda
description: Emission wavelength for channel, in nm.
MotionCorrection:
name: MotionCorrection
description: 'An image stack where all frames are shifted (registered) to a common
coordinate system, to account for movement and drift between frames. Note: each
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
CorrectedImageStack:
name: CorrectedImageStack
description: Results from motion correction of an image stack.
multivalued: true
range: CorrectedImageStack
required: true
CorrectedImageStack:
name: CorrectedImageStack
description: Results from motion correction of an image stack.
is_a: NWBDataInterface
attributes:
corrected:
name: corrected
description: Image stack with frames shifted to the common coordinates.
multivalued: false
range: CorrectedImageStack_corrected
required: true
xy_translation:
name: xy_translation
description: Stores the x,y delta necessary to align each frame to the common
coordinates, for example, to align each frame to a reference image.
multivalued: false
range: CorrectedImageStack_xy_translation
required: true
CorrectedImageStack_corrected:
name: CorrectedImageStack_corrected
description: Image stack with frames shifted to the common coordinates.
is_a: ImageSeries
CorrectedImageStack_xy_translation:
name: CorrectedImageStack_xy_translation
description: Stores the x,y delta necessary to align each frame to the common
coordinates, for example, to align each frame to a reference image.
is_a: TimeSeries

@ -0,0 +1,333 @@
name: core.nwb.retinotopy
id: core.nwb.retinotopy
imports:
- core.nwb.base
- nwb.language
default_prefix: core.nwb.retinotopy/
classes:
ImagingRetinotopy:
name: ImagingRetinotopy
description: 'Intrinsic signal optical imaging or widefield imaging for measuring
retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of
responses to specific stimuli and a combined polarity map from which to identify
visual areas. This group does not store the raw responses imaged during retinotopic
mapping or the stimuli presented, but rather the resulting phase and power maps
after applying a Fourier transform on the averaged responses. Note: for data
consistency, all images and arrays are stored in the format [row][column] and
[row, col], which equates to [y][x]. Field of view and dimension arrays may
appear backward (i.e., y before x).'
is_a: NWBDataInterface
attributes:
axis_1_phase_map:
name: axis_1_phase_map
description: Phase response to stimulus on the first measured axis.
multivalued: false
range: ImagingRetinotopy_axis_1_phase_map
required: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
multivalued: false
range: ImagingRetinotopy_axis_1_power_map
required: false
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
multivalued: false
range: ImagingRetinotopy_axis_2_phase_map
required: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
multivalued: false
range: ImagingRetinotopy_axis_2_power_map
required: false
axis_descriptions:
name: axis_descriptions
description: Two-element array describing the contents of the two response
axis fields. Description should be something like ['altitude', 'azimuth']
or ['radius', 'theta'].
multivalued: false
range: ImagingRetinotopy_axis_descriptions
required: true
focal_depth_image:
name: focal_depth_image
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
multivalued: false
range: ImagingRetinotopy_focal_depth_image
required: false
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
multivalued: false
range: ImagingRetinotopy_sign_map
required: false
vasculature_image:
name: vasculature_image
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
multivalued: false
range: ImagingRetinotopy_vasculature_image
required: true
ImagingRetinotopy_axis_1_phase_map:
name: ImagingRetinotopy_axis_1_phase_map
description: Phase response to stimulus on the first measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_1_phase_map_Array
ImagingRetinotopy_axis_1_phase_map_Array:
name: ImagingRetinotopy_axis_1_phase_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_1_power_map:
name: ImagingRetinotopy_axis_1_power_map
description: Power response on the first measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_1_power_map_Array
ImagingRetinotopy_axis_1_power_map_Array:
name: ImagingRetinotopy_axis_1_power_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_2_phase_map:
name: ImagingRetinotopy_axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_2_phase_map_Array
ImagingRetinotopy_axis_2_phase_map_Array:
name: ImagingRetinotopy_axis_2_phase_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_2_power_map:
name: ImagingRetinotopy_axis_2_power_map
description: Power response on the second measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_2_power_map_Array
ImagingRetinotopy_axis_2_power_map_Array:
name: ImagingRetinotopy_axis_2_power_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_descriptions:
name: ImagingRetinotopy_axis_descriptions
description: Two-element array describing the contents of the two response axis
fields. Description should be something like ['altitude', 'azimuth'] or ['radius',
'theta'].
attributes:
array:
name: array
range: ImagingRetinotopy_axis_descriptions_Array
ImagingRetinotopy_axis_descriptions_Array:
name: ImagingRetinotopy_axis_descriptions_Array
is_a: Arraylike
attributes:
axis_1, axis_2:
name: axis_1, axis_2
range: text
required: true
minimum_cardinality: 2
maximum_cardinality: 2
ImagingRetinotopy_focal_depth_image:
name: ImagingRetinotopy_focal_depth_image
description: 'Gray-scale image taken with same settings/parameters (e.g., focal
depth, wavelength) as data collection. Array format: [rows][columns].'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy_focal_depth_image_Array
ImagingRetinotopy_focal_depth_image_Array:
name: ImagingRetinotopy_focal_depth_image_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false
ImagingRetinotopy_sign_map:
name: ImagingRetinotopy_sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
array:
name: array
range: ImagingRetinotopy_sign_map_Array
ImagingRetinotopy_sign_map_Array:
name: ImagingRetinotopy_sign_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_vasculature_image:
name: ImagingRetinotopy_vasculature_image
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy_vasculature_image_Array
ImagingRetinotopy_vasculature_image_Array:
name: ImagingRetinotopy_vasculature_image_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false

@ -0,0 +1,18 @@
name: core
description: NWB namespace
id: core
version: 2.6.0-alpha
imports:
- core.nwb.base
- core.nwb.device
- core.nwb.epoch
- core.nwb.image
- core.nwb.file
- core.nwb.misc
- core.nwb.behavior
- core.nwb.ecephys
- core.nwb.icephys
- core.nwb.ogen
- core.nwb.ophys
- core.nwb.retinotopy
default_prefix: core/

@ -0,0 +1,30 @@
name: hdmf-common.base
id: hdmf-common.base
imports:
- nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
Data:
name: Data
description: Data objects held within this SimpleMultiContainer.
multivalued: true
range: Data
required: false
Container:
name: Container
description: Container objects held within this SimpleMultiContainer.
multivalued: true
range: Container
required: false

@ -0,0 +1,82 @@
name: hdmf-common.sparse
id: hdmf-common.sparse
imports:
- hdmf-common.base
- nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
indices:
name: indices
description: The column indices.
multivalued: false
range: CSRMatrix_indices
required: true
indptr:
name: indptr
description: The row index pointer.
multivalued: false
range: CSRMatrix_indptr
required: true
data:
name: data
description: The non-zero values in the matrix.
multivalued: false
range: CSRMatrix_data
required: true
CSRMatrix_indices:
name: CSRMatrix_indices
description: The column indices.
attributes:
array:
name: array
range: CSRMatrix_indices_Array
CSRMatrix_indices_Array:
name: CSRMatrix_indices_Array
is_a: Arraylike
attributes:
number of non-zero values:
name: number of non-zero values
range: uint
required: true
CSRMatrix_indptr:
name: CSRMatrix_indptr
description: The row index pointer.
attributes:
array:
name: array
range: CSRMatrix_indptr_Array
CSRMatrix_indptr_Array:
name: CSRMatrix_indptr_Array
is_a: Arraylike
attributes:
number of rows in the matrix + 1:
name: number of rows in the matrix + 1
range: uint
required: true
CSRMatrix_data:
name: CSRMatrix_data
description: The non-zero values in the matrix.
attributes:
array:
name: array
range: CSRMatrix_data_Array
CSRMatrix_data_Array:
name: CSRMatrix_data_Array
is_a: Arraylike
attributes:
number of non-zero values:
name: number of non-zero values
range: AnyType
required: true

@ -0,0 +1,209 @@
name: hdmf-common.table
id: hdmf-common.table
imports:
- hdmf-common.base
- nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
description:
name: description
description: Description of what these vectors represent.
range: text
array:
name: array
range: VectorData_Array
VectorData_Array:
name: VectorData_Array
is_a: Arraylike
attributes:
dim0:
name: dim0
range: AnyType
required: true
dim1:
name: dim1
range: AnyType
required: false
dim2:
name: dim2
range: AnyType
required: false
dim3:
name: dim3
range: AnyType
required: false
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
array:
name: array
range: VectorIndex_Array
VectorIndex_Array:
name: VectorIndex_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint8
required: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
array:
name: array
range: ElementIdentifiers_Array
ElementIdentifiers_Array:
name: ElementIdentifiers_Array
is_a: Arraylike
attributes:
num_elements:
name: num_elements
range: int
required: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
description:
name: description
description: Description of what this table region points to.
range: text
array:
name: array
range: DynamicTableRegion_Array
DynamicTableRegion_Array:
name: DynamicTableRegion_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: int
required: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
description:
name: description
description: Description of what is in this dynamic table.
range: text
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
multivalued: false
range: DynamicTable_id
required: true
VectorData:
name: VectorData
description: Vector columns, including index columns, of this dynamic table.
multivalued: true
range: VectorData
required: false
DynamicTable_id:
name: DynamicTable_id
description: Array of unique identifiers for the rows of this dynamic table.
is_a: ElementIdentifiers
attributes:
array:
name: array
range: DynamicTable_id_Array
DynamicTable_id_Array:
name: DynamicTable_id_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: int
required: true
AlignedDynamicTable:
name: AlignedDynamicTable
description: DynamicTable container that supports storing a collection of sub-tables.
Each sub-table is a DynamicTable itself that is aligned with the main table
by row index. I.e., all DynamicTables stored in this group MUST have the same
number of rows. This type effectively defines a 2-level table in which the main
data is stored in the main table implemented by this type and additional columns
of the table are grouped into categories, with each category being represented
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
categories:
name: categories
description: The names of the categories in this AlignedDynamicTable. Each
category is represented by one DynamicTable stored in the parent group.
This attribute should be used to specify an order of categories and the
category names must match the names of the corresponding DynamicTable in
the group.
range: text
DynamicTable:
name: DynamicTable
description: A DynamicTable representing a particular category for columns
in the AlignedDynamicTable parent container. The table MUST be aligned with
          (i.e., have the same number of rows as) all other DynamicTables stored in
the AlignedDynamicTable parent container. The name of the category is given
by the name of the DynamicTable and its description by the description attribute
of the DynamicTable.
multivalued: true
range: DynamicTable
required: false
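The VectorData / VectorIndex / DynamicTableRegion classes above encode HDMF's ragged-column pattern. A minimal sketch of what that encoding means, using plain Python lists rather than the generated models or the HDMF API (all values are illustrative):

# VectorData holds the flat values; VectorIndex holds the end offset of each row,
# so row i spans data[index[i-1]:index[i]] (with an implicit start of 0).
data = ["a", "b", "c", "d", "e"]   # VectorData
index = [2, 3, 5]                  # VectorIndex
starts = [0] + index[:-1]
rows = [data[s:e] for s, e in zip(starts, index)]
assert rows == [["a", "b"], ["c"], ["d", "e"]]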

View file

@ -0,0 +1,9 @@
name: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.8.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
default_prefix: hdmf-common/

View file

@ -0,0 +1,18 @@
name: hdmf-experimental.experimental
id: hdmf-experimental.experimental
imports:
- hdmf-common.table
- nwb.language
default_prefix: hdmf-experimental.experimental/
classes:
EnumData:
name: EnumData
description: Data that come from a fixed set of values. A data value of i corresponds
to the i-th value in the VectorData referenced by the 'elements' attribute.
is_a: VectorData
attributes:
elements:
name: elements
description: Reference to the VectorData object that contains the enumerable
elements
range: VectorData
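A minimal sketch of the EnumData encoding described above (illustrative values, not the generated model or the HDMF API): each stored value is an index into the VectorData referenced by the 'elements' attribute.

elements = ["left", "right", "undetermined"]  # the referenced VectorData
data = [0, 2, 1, 0]                           # EnumData values
decoded = [elements[i] for i in data]
assert decoded == ["left", "undetermined", "right", "left"]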

View file

@ -0,0 +1,149 @@
name: hdmf-experimental.resources
id: hdmf-experimental.resources
imports:
- hdmf-common.base
- nwb.language
default_prefix: hdmf-experimental.resources/
classes:
HERD:
name: HERD
description: HDMF External Resources Data Structure. A set of six tables for tracking
external resource references in a file or across multiple files.
is_a: Container
attributes:
keys:
name: keys
description: A table for storing user terms that are used to refer to external
resources.
multivalued: false
range: HERD_keys
required: true
files:
name: files
description: A table for storing object ids of files used in external resources.
multivalued: false
range: HERD_files
required: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
multivalued: false
range: HERD_entities
required: true
objects:
name: objects
description: A table for identifying which objects in a file contain references
to external resources.
multivalued: false
range: HERD_objects
required: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
multivalued: false
range: HERD_object_keys
required: true
entity_keys:
name: entity_keys
description: A table for identifying which keys use which entity.
multivalued: false
range: HERD_entity_keys
required: true
HERD_keys:
name: HERD_keys
description: A table for storing user terms that are used to refer to external
resources.
is_a: Data
attributes:
array:
name: array
range: HERD_keys_Array
HERD_keys_Array:
name: HERD_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_files:
name: HERD_files
description: A table for storing object ids of files used in external resources.
is_a: Data
attributes:
array:
name: array
range: HERD_files_Array
HERD_files_Array:
name: HERD_files_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_entities:
name: HERD_entities
description: A table for mapping user terms (i.e., keys) to resource entities.
is_a: Data
attributes:
array:
name: array
range: HERD_entities_Array
HERD_entities_Array:
name: HERD_entities_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_objects:
name: HERD_objects
description: A table for identifying which objects in a file contain references
to external resources.
is_a: Data
attributes:
array:
name: array
range: HERD_objects_Array
HERD_objects_Array:
name: HERD_objects_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_object_keys:
name: HERD_object_keys
description: A table for identifying which objects use which keys.
is_a: Data
attributes:
array:
name: array
range: HERD_object_keys_Array
HERD_object_keys_Array:
name: HERD_object_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_entity_keys:
name: HERD_entity_keys
description: A table for identifying which keys use which entity.
is_a: Data
attributes:
array:
name: array
range: HERD_entity_keys_Array
HERD_entity_keys_Array:
name: HERD_entity_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true

View file

@ -0,0 +1,9 @@
name: hdmf-experimental
description: Experimental data structures provided by HDMF. These are not guaranteed
to be available in the future.
id: hdmf-experimental
version: 0.5.0
imports:
- hdmf-experimental.experimental
- hdmf-experimental.resources
default_prefix: hdmf-experimental/

View file

@ -0,0 +1,154 @@
name: nwb.language
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float:
name: float
typeof: float
float32:
name: float32
typeof: float
double:
name: double
typeof: double
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: date
enums:
FlatDType:
name: FlatDType
permissible_values:
float:
text: float
float32:
text: float32
double:
text: double
float64:
text: float64
long:
text: long
int64:
text: int64
int:
text: int
int32:
text: int32
int16:
text: int16
short:
text: short
int8:
text: int8
uint:
text: uint
uint32:
text: uint32
uint16:
text: uint16
uint8:
text: uint8
uint64:
text: uint64
numeric:
text: numeric
text:
text: text
utf:
text: utf
utf8:
text: utf8
utf_8:
text: utf_8
ascii:
text: ascii
bool:
text: bool
isodatetime:
text: isodatetime
classes:
Arraylike:
name: Arraylike
    description: Container for arraylike information held in the dims, shape, and
      dtype properties. This is a special case to be interpreted by downstream i/o.
      This class has no slots and is abstract by default. Each slot within a subclass
      indicates a possible dimension. Only dimensions that are present in all the
      dimension specifiers in the original schema are required. Shape requirements
      are indicated using max/min cardinalities on the slot.
abstract: true
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any
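The types above mirror NWB's flat dtypes as LinkML type definitions. As a sketch (shown for illustration only, not part of the generated output), one of them corresponds to a TypeDefinition built programmatically with linkml_runtime:

from linkml_runtime.linkml_model import TypeDefinition

# uint8 narrows the linkml integer type and adds a lower bound of 0
uint8 = TypeDefinition(name='uint8', typeof='integer', minimum_value=0)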

View file

@ -253,7 +253,7 @@ groups:
dtype: text
doc: Description of this collection of images.
datasets:
# - neurodata_type_inc: Image
- neurodata_type_inc: Image
doc: Images stored in this collection.
quantity: '+'
- name: order_of_images

216
poetry.lock generated
View file

@ -1,5 +1,20 @@
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]]
name = "ansi2html"
version = "1.8.0"
description = ""
optional = false
python-versions = ">=3.6"
files = [
{file = "ansi2html-1.8.0-py3-none-any.whl", hash = "sha256:ef9cc9682539dbe524fbf8edad9c9462a308e04bce1170c32daa8fdfd0001785"},
{file = "ansi2html-1.8.0.tar.gz", hash = "sha256:38b82a298482a1fa2613f0f9c9beb3db72a8f832eeac58eb2e47bf32cd37f6d5"},
]
[package.extras]
docs = ["Sphinx", "setuptools-scm", "sphinx-rtd-theme"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "antlr4-python3-runtime"
version = "4.9.3"
@ -210,6 +225,86 @@ pandas = ["pandas"]
rdflib = ["rdflib"]
tests = ["coverage", "pytest"]
[[package]]
name = "dash"
version = "2.12.1"
description = "A Python framework for building reactive web-apps. Developed by Plotly."
optional = false
python-versions = ">=3.6"
files = [
{file = "dash-2.12.1-py3-none-any.whl", hash = "sha256:23fcde95e59e353c34712c8fa3e90e784a7247a9e5f6ef47e467add10b7e91ab"},
{file = "dash-2.12.1.tar.gz", hash = "sha256:c7d3dccafff2d041a371dcf5bbb2a1701a38ca178c12dce93e64207e3aecbaeb"},
]
[package.dependencies]
ansi2html = "*"
dash-core-components = "2.0.0"
dash-html-components = "2.0.0"
dash-table = "5.0.0"
Flask = ">=1.0.4,<2.3.0"
nest-asyncio = "*"
plotly = ">=5.0.0"
requests = "*"
retrying = "*"
setuptools = "*"
typing-extensions = ">=4.1.1"
Werkzeug = "<2.3.0"
[package.extras]
celery = ["celery[redis] (>=5.1.2)", "importlib-metadata (<5)", "redis (>=3.5.3)"]
ci = ["black (==21.6b0)", "black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==3.9.2)", "flaky (==3.7.0)", "flask-talisman (==1.0.0)", "isort (==4.3.21)", "jupyterlab (<4.0.0)", "mimesis", "mock (==4.0.3)", "numpy", "openpyxl", "orjson (==3.5.4)", "orjson (==3.6.7)", "pandas (==1.1.5)", "pandas (>=1.4.0)", "preconditions", "pyarrow", "pyarrow (<3)", "pylint (==2.13.5)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "xlrd (<2)", "xlrd (>=2.0.1)"]
compress = ["flask-compress"]
dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
[[package]]
name = "dash-core-components"
version = "2.0.0"
description = "Core component suite for Dash"
optional = false
python-versions = "*"
files = [
{file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
{file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
]
[[package]]
name = "dash-cytoscape"
version = "0.3.0"
description = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js"
optional = false
python-versions = "*"
files = [
{file = "dash_cytoscape-0.3.0-py3-none-any.whl", hash = "sha256:718dc1568b9e7bfe7f64376aa903c64a1a1fe6daed4e559b254456f18dd3135f"},
{file = "dash_cytoscape-0.3.0.tar.gz", hash = "sha256:a71ad4fe095570b71d4ad7c0d29199e9780c2e6796173d3b25fccc2cc58c855f"},
]
[package.dependencies]
dash = "*"
[[package]]
name = "dash-html-components"
version = "2.0.0"
description = "Vanilla HTML components for Dash"
optional = false
python-versions = "*"
files = [
{file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
{file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
]
[[package]]
name = "dash-table"
version = "5.0.0"
description = "Dash table"
optional = false
python-versions = "*"
files = [
{file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
{file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
]
[[package]]
name = "decorator"
version = "5.1.1"
@ -249,6 +344,27 @@ files = [
{file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
]
[[package]]
name = "flask"
version = "2.2.5"
description = "A simple framework for building complex web applications."
optional = false
python-versions = ">=3.7"
files = [
{file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"},
{file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"},
]
[package.dependencies]
click = ">=8.0"
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.2.2"
[package.extras]
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]
[[package]]
name = "fqdn"
version = "1.5.1"
@ -443,6 +559,17 @@ files = [
[package.dependencies]
arrow = ">=0.15.0"
[[package]]
name = "itsdangerous"
version = "2.1.2"
description = "Safely pass data to untrusted environments and back."
optional = false
python-versions = ">=3.7"
files = [
{file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
{file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
]
[[package]]
name = "jinja2"
version = "3.1.2"
@ -760,6 +887,17 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "nest-asyncio"
version = "1.5.7"
description = "Patch asyncio to allow nested event loops"
optional = false
python-versions = ">=3.5"
files = [
{file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"},
{file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"},
]
[[package]]
name = "networkx"
version = "3.1"
@ -834,6 +972,21 @@ files = [
{file = "parse-1.19.1.tar.gz", hash = "sha256:cc3a47236ff05da377617ddefa867b7ba983819c664e1afe46249e5b469be464"},
]
[[package]]
name = "plotly"
version = "5.16.1"
description = "An open-source, interactive data visualization library for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "plotly-5.16.1-py2.py3-none-any.whl", hash = "sha256:19cc34f339acd4e624177806c14df22f388f23fb70658b03aad959a0e650a0dc"},
{file = "plotly-5.16.1.tar.gz", hash = "sha256:295ac25edeb18c893abb71dcadcea075b78fd6fdf07cee4217a4e1009667925b"},
]
[package.dependencies]
packaging = "*"
tenacity = ">=6.2.0"
[[package]]
name = "pluggy"
version = "1.2.0"
@ -1247,6 +1400,20 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "retrying"
version = "1.3.4"
description = "Retrying"
optional = false
python-versions = "*"
files = [
{file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"},
{file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"},
]
[package.dependencies]
six = ">=1.7.0"
[[package]]
name = "rfc3339-validator"
version = "0.1.4"
@ -1460,6 +1627,22 @@ files = [
{file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"},
]
[[package]]
name = "setuptools"
version = "68.1.2"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"},
{file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "shexjsg"
version = "0.8.2"
@ -1610,6 +1793,20 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "tenacity"
version = "8.2.3"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.7"
files = [
{file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"},
{file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"},
]
[package.extras]
doc = ["reno", "sphinx", "tornado (>=4.5)"]
[[package]]
name = "typing-extensions"
version = "4.7.1"
@ -1706,6 +1903,23 @@ files = [
docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"]
tests = ["pytest", "pytest-cov"]
[[package]]
name = "werkzeug"
version = "2.2.3"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.7"
files = [
{file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"},
{file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"},
]
[package.dependencies]
MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog"]
[[package]]
name = "wrapt"
version = "1.15.0"
@ -1808,4 +2022,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "debbeeaba69d6afc3da329ccc76e0c2ae3124773b85a577a10cb5e673845a9e5"
content-hash = "8b70e71931e2f519212b40f0dfcb9cc597e9f4894dbf392a0232df9060a35ee1"

View file

@ -19,11 +19,20 @@ pydantic = "<2"
rich = "^13.5.2"
linkml = "^1.5.7"
[tool.poetry.group.tests]
optional = true
[tool.poetry.group.dev.dependencies]
[tool.poetry.group.tests.dependencies]
pytest = "^7.4.0"
pytest-depends = "^1.0.1"
[tool.poetry.group.plot]
optional = true
[tool.poetry.group.plot.dependencies]
dash = "^2.12.1"
dash-cytoscape = "^0.3.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
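Usage note: the tests and plot dependency groups are marked optional, so a plain poetry install skips them; they can be pulled in explicitly, e.g. poetry install --with tests or poetry install --with plot (assuming Poetry >= 1.2's group syntax, consistent with the 1.5.x lockfile above).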

62
scripts/generate_core.py Normal file
View file

@ -0,0 +1,62 @@
from argparse import ArgumentParser
from pathlib import Path
from linkml_runtime.dumpers import yaml_dumper
from linkml.generators import PydanticGenerator
from nwb_linkml import io
def generate_core_yaml(output_path:Path):
core = io.load_nwb_core()
built_schemas = core.build().schemas
for schema in built_schemas:
output_file = output_path / (schema.name + '.yaml')
yaml_dumper.dump(schema, output_file)
def generate_core_pydantic(yaml_path:Path, output_path:Path):
for schema in yaml_path.glob('*.yaml'):
pydantic_file = (output_path / schema.name).with_suffix('.py')
generator = PydanticGenerator(
str(schema),
pydantic_version='1',
emit_metadata=True,
gen_classvars=True,
gen_slots=True
)
gen_pydantic = generator.serialize()
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)
def parser() -> ArgumentParser:
parser = ArgumentParser('Generate NWB core schema')
parser.add_argument(
'--yaml',
help="directory to export linkML schema to",
type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'schema'
)
parser.add_argument(
'--pydantic',
help="directory to export pydantic models",
type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'models'
)
return parser
def main():
args = parser().parse_args()
args.yaml.mkdir(exist_ok=True)
args.pydantic.mkdir(exist_ok=True)
generate_core_yaml(args.yaml)
generate_core_pydantic(args.yaml, args.pydantic)
if __name__ == "__main__":
main()
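Usage note: running python scripts/generate_core.py with no arguments writes the LinkML YAML to nwb_linkml/schema and the pydantic models to nwb_linkml/models (relative to the repo root); the --yaml and --pydantic flags override those output directories.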

View file

@ -21,17 +21,21 @@ def test_generate_core(nwb_core_fixture, tmp_output_dir):
@pytest.mark.depends(on=['test_generate_core'])
def test_generate_pydantic(tmp_output_dir):
core_file = tmp_output_dir / 'core.yaml'
pydantic_file = tmp_output_dir / 'core.py'
generator = PydanticGenerator(
str(core_file),
pydantic_version='1',
emit_metadata=True,
gen_classvars=True,
gen_slots=True
)
gen_pydantic = generator.serialize()
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)
# core_file = tmp_output_dir / 'core.yaml'
# pydantic_file = tmp_output_dir / 'core.py'
for schema in tmp_output_dir.glob('*.yaml'):
pydantic_file = (schema.parent / schema.name).with_suffix('.py')
generator = PydanticGenerator(
str(schema),
pydantic_version='1',
emit_metadata=True,
gen_classvars=True,
gen_slots=True
)
gen_pydantic = generator.serialize()
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)