Cleaner code generation, nptyping type hints for arrays

- split off generated subclasses into "include" files, not sure if that's good, but in any case we have them separable now.
- more work on cleanly un-nesting scalar and 1D-vector data into attributes and lists, respectively
- brought the pydantic generator in-repo to do a bunch of overrides
This commit is contained in:
sneakers-the-rat 2023-08-28 22:16:58 -07:00
parent 63c6cef10b
commit be21325123
100 changed files with 7897 additions and 19659 deletions

View file

@ -80,9 +80,11 @@ class Adapter(BaseModel):
# do nothing, is a string or whatever
pass
def walk_fields(self, input: BaseModel | list | dict, field: str):
def walk_fields(self, input: BaseModel | list | dict, field: str | Tuple[str, ...]):
if isinstance(field, str):
field = (field,)
for item in self.walk(input):
if isinstance(item, tuple) and item[0] == field and item[1] is not None:
if isinstance(item, tuple) and item[0] in field and item[1] is not None:
yield item[1]

View file

@ -29,7 +29,7 @@ class ClassAdapter(Adapter):
name_parts.append(self.parent._get_full_name())
name_parts.append(self.cls.name)
name = '_'.join(name_parts)
name = '__'.join(name_parts)
elif self.cls.neurodata_type_inc is not None:
# again, this is against the schema, but is common
name = self.cls.neurodata_type_inc
@ -62,7 +62,9 @@ class ClassAdapter(Adapter):
return name
def handle_arraylike(self, dataset: Dataset, name:Optional[str]=None) -> Optional[ClassDefinition]:
def handle_arraylike(self, dataset: Dataset, name:Optional[str]=None) -> Optional[ClassDefinition | SlotDefinition]:
"""
Handling the
@ -96,6 +98,11 @@ class ClassAdapter(Adapter):
# need to have both if one is present!
raise ValueError(f"A dataset needs both dims and shape to define an arraylike object")
# Special cases
if dataset.neurodata_type_inc == 'VectorData':
# Handle this in `handle_vectorlike` instead
return None
# The schema language doesn't have a way of specifying a dataset/group is "abstract"
# and yet hdmf-common says you don't need a dtype if the dataset is "abstract"
# so....
@ -117,6 +124,18 @@ class ClassAdapter(Adapter):
dims_shape = tuple(dict.fromkeys(dims_shape).keys())
# if we only have one possible dimension, it's equivalent to a list, so we just return the slot
if len(dims_shape) == 1 and self.parent:
quantity = QUANTITY_MAP[dataset.quantity]
slot = SlotDefinition(
name=dataset.name,
range = dtype,
description=dataset.doc,
required=quantity['required'],
multivalued=True
)
return slot
# now make slots for each of them
slots = []
for dims, shape in dims_shape:
@ -140,6 +159,8 @@ class ClassAdapter(Adapter):
range=dtype
))
# and then the class is just a subclass of `Arraylike` (which is imported by default from `nwb.language.yaml`)
if name:
pass
@ -150,7 +171,7 @@ class ClassAdapter(Adapter):
else:
raise ValueError(f"Dataset has no name or type definition, what do call it?")
name = '_'.join([name, 'Array'])
name = '__'.join([name, 'Array'])
array_class = ClassDefinition(
name=name,
@ -203,13 +224,46 @@ class ClassAdapter(Adapter):
nested_classes.extend([ClassAdapter(cls=grp, parent=self) for grp in cls.groups])
nested_res = BuildResult()
for subclass in nested_classes:
this_slot = SlotDefinition(
name=subclass._get_name(),
description=subclass.cls.doc,
range=subclass._get_full_name(),
**QUANTITY_MAP[subclass.cls.quantity]
)
nested_res.slots.append(this_slot)
# handle the special case where `VectorData` is subclassed without any dims or attributes
# which just gets instantiated as a 1-d array in HDF5
if subclass.cls.neurodata_type_inc == 'VectorData' and \
not subclass.cls.dims and \
not subclass.cls.shape and \
not subclass.cls.attributes \
and subclass.cls.name:
this_slot = SlotDefinition(
name=subclass.cls.name,
description=subclass.cls.doc,
range=self.handle_dtype(subclass.cls.dtype),
multivalued=True
)
nested_res.slots.append(this_slot)
continue
# Simplify datasets that are just a single value
elif isinstance(subclass.cls, Dataset) and \
not subclass.cls.neurodata_type_inc and \
not subclass.cls.attributes and \
not subclass.cls.dims and \
not subclass.cls.shape and \
subclass.cls.name:
this_slot = SlotDefinition(
name=subclass.cls.name,
description=subclass.cls.doc,
range=self.handle_dtype(subclass.cls.dtype),
**QUANTITY_MAP[subclass.cls.quantity]
)
nested_res.slots.append(this_slot)
continue
else:
this_slot = SlotDefinition(
name=subclass._get_name(),
description=subclass.cls.doc,
range=subclass._get_full_name(),
**QUANTITY_MAP[subclass.cls.quantity]
)
nested_res.slots.append(this_slot)
if subclass.cls.name is None and subclass.cls.neurodata_type_def is None:
# anonymous group that's just an inc, we only need the slot since the class is defined elsewhere
@ -246,14 +300,19 @@ class ClassAdapter(Adapter):
nested_res = BuildResult()
arraylike = self.handle_arraylike(self.cls, self._get_full_name())
if arraylike:
# make a slot for the arraylike class
attrs.append(
SlotDefinition(
name='array',
range=arraylike.name
# if the arraylike thing can only have one dimension, it's equivalent to a list, so
# we just add a multivalued slot
if isinstance(arraylike, SlotDefinition):
attrs.append(arraylike)
else:
# make a slot for the arraylike class
attrs.append(
SlotDefinition(
name='array',
range=arraylike.name
)
)
)
nested_res.classes.append(arraylike)
nested_res.classes.append(arraylike)
cls = ClassDefinition(

View file

@ -13,9 +13,15 @@ if TYPE_CHECKING:
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_schema_language import Group, Dataset
from typing import NamedTuple
from linkml_runtime.linkml_model import SchemaDefinition
class SplitSchema(NamedTuple):
main: BuildResult
split: BuildResult
class SchemaAdapter(Adapter):
"""
An individual schema file in nwb_schema_language
@ -24,8 +30,13 @@ class SchemaAdapter(Adapter):
groups: List[Group] = Field(default_factory=list)
datasets: List[Dataset] = Field(default_factory=list)
imports: List['SchemaAdapter'] = Field(default_factory=list)
namespace: Optional[str] = None
"""Populated by NamespacesAdapter"""
namespace: Optional[str] = Field(
None,
description="""String of containing namespace. Populated by NamespacesAdapter""")
split: bool = Field(
True,
description="Split anonymous subclasses into a separate schema file"
)
@property
def name(self) -> str:
@ -66,18 +77,69 @@ class SchemaAdapter(Adapter):
else:
built_results += cls.build()
if self.split:
sch_split = self.split_subclasses(built_results)
return sch_split
sch = SchemaDefinition(
name = self.name,
id = self.name,
imports = [i.name for i in self.imports],
classes=built_results.classes,
slots=built_results.slots,
types=built_results.types
else:
sch = SchemaDefinition(
name = self.name,
id = self.name,
imports = [i.name for i in self.imports],
classes=built_results.classes,
slots=built_results.slots,
types=built_results.types
)
# every schema needs the language elements
sch.imports.append('nwb.language')
return BuildResult(schemas=[sch])
def split_subclasses(self, classes: BuildResult) -> BuildResult:
"""
Split the generated classes into top-level "main" classes and
nested/anonymous "split" classes.
Args:
classes (BuildResult): A Build result object containing the classes
for the schema
Returns:
:class:`.SplitSchema`
"""
# just split by the presence or absence of __
main_classes = [c for c in classes.classes if '__' not in c.name]
split_classes = [c for c in classes.classes if '__' in c.name]
split_sch_name = '.'.join([self.name, 'include'])
imports = [i.name for i in self.imports]
imports.append('nwb.language')
# need to mutually import the two schemas because the subclasses
# could refer to the main classes
main_imports = imports
main_imports.append(split_sch_name)
imports.append(self.name)
main_sch = SchemaDefinition(
name=self.name,
id=self.name,
imports=main_imports,
classes=main_classes,
slots=classes.slots,
types=classes.types
)
# every schema needs the language elements
sch.imports.append('nwb.language')
return BuildResult(schemas=[sch])
split_sch = SchemaDefinition(
name=split_sch_name,
id=split_sch_name,
imports=imports,
classes=split_classes,
slots=classes.slots,
types=classes.types
)
res = BuildResult(
schemas=[main_sch, split_sch]
)
return res
@property
@ -94,7 +156,8 @@ class SchemaAdapter(Adapter):
- Need to also check classes used in links/references
"""
type_incs = self.walk_fields(self, 'neurodata_type_inc')
type_incs = self.walk_fields(self, ('neurodata_type_inc', 'target_type'))
definitions = [c.neurodata_type_def for c in self.created_classes]
need = [inc for inc in type_incs if inc not in definitions]
return need

View file

View file

@ -0,0 +1,447 @@
"""
Subclass of :class:`linkml.generators.PydanticGenerator`
The pydantic generator is a subclass of
- :class:`linkml.utils.generator.Generator`
- :class:`linkml.generators.oocodegen.OOCodeGenerator`
The default `__main__` method
- Instantiates the class
- Calls :meth:`~linkml.generators.PydanticGenerator.serialize`
The `serialize` method
- Accepts an optional jinja-style template, otherwise it uses the default template
- Uses :class:`linkml_runtime.utils.schemaview.SchemaView` to interact with the schema
- Generates linkML Classes
- `generate_enums` runs first
"""
import pdb
from typing import List, Dict, Set
from copy import deepcopy
import warnings
from nwb_linkml.maps.dtype import flat_to_npytyping
from linkml.generators import PydanticGenerator
from linkml_runtime.linkml_model.meta import (
Annotation,
ClassDefinition,
SchemaDefinition,
SlotDefinition,
SlotDefinitionName,
TypeDefinition,
ElementName
)
from linkml_runtime.utils.formatutils import camelcase, underscore
from linkml_runtime.utils.schemaview import SchemaView
from jinja2 import Template
def default_template(pydantic_ver: str = "1") -> str:
    """
    Construct the default Jinja2 template used to render pydantic classes.

    The template is assembled from four sections: a module header (imports,
    metamodel/version constants), a ``ConfiguredBaseModel`` base-class shim,
    enum definitions, and class definitions. A footer updates forward
    references (pydantic v1) or rebuilds models (v2).

    Args:
        pydantic_ver (str): Target major version of pydantic. ``"1"`` emits
            the v1-compatible base model and ``update_forward_refs`` footer;
            any other value emits v2-style ``validate_default`` config and a
            ``model_rebuild`` footer.

    Returns:
        str: Jinja2 template source, ready to be compiled with
        :class:`jinja2.Template`.
    """
    ### HEADER ###
    # NOTE: the nptyping import previously listed several names twice
    # (String, Unicode); the list below is deduplicated.
    template = """
{#-
  Jinja2 Template for a pydantic classes
-#}
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

{% for import_module, import_classes in imports.items() %}
from {{ import_module }} import (
    {{ import_classes | join(',\n    ') }}
)
{% endfor %}

metamodel_version = "{{metamodel_version}}"
version = "{{version if version else None}}"
"""
    ### BASE MODEL ###
    if pydantic_ver == "1":
        # pydantic v1 needs the weakref shim so generated models can be
        # weakly referenced despite using __slots__
        template += """
class WeakRefShimBaseModel(BaseModel):
   __slots__ = '__weakref__'

class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = {% if allow_extra %}'allow'{% else %}'forbid'{% endif %},
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
"""
    else:
        template += """
class ConfiguredBaseModel(BaseModel,
                validate_assignment = True,
                validate_default = True,
                extra = {% if allow_extra %}'allow'{% else %}'forbid'{% endif %},
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
"""
    ### ENUMS ###
    template += """
{% for e in enums.values() %}
class {{ e.name }}(str, Enum):
    {% if e.description -%}
    \"\"\"
    {{ e.description }}
    \"\"\"
    {%- endif %}
    {% for _, pv in e['values'].items() -%}
    {% if pv.description -%}
    # {{pv.description}}
    {%- endif %}
    {{pv.label}} = "{{pv.value}}"
    {% endfor %}
    {% if not e['values'] -%}
    dummy = "dummy"
    {% endif %}
{% endfor %}
"""
    ### CLASSES ###
    template += """
{%- for c in schema.classes.values() %}
class {{ c.name }}
    {%- if class_isa_plus_mixins[c.name] -%}
    ({{class_isa_plus_mixins[c.name]|join(', ')}})
    {%- else -%}
    (ConfiguredBaseModel)
    {%- endif -%}
    :
    {% if c.description -%}
    \"\"\"
    {{ c.description }}
    \"\"\"
    {%- endif %}
    {% for attr in c.attributes.values() if c.attributes -%}
    {{attr.name}}: {{ attr.annotations['python_range'].value }} = Field(
    {%- if predefined_slot_values[c.name][attr.name] -%}
        {{ predefined_slot_values[c.name][attr.name] }}
    {%- elif attr.required -%}
        ...
    {%- else -%}
        None
    {%- endif -%}
    {%- if attr.title != None %}, title="{{attr.title}}"{% endif -%}
    {%- if attr.description %}, description=\"\"\"{{attr.description}}\"\"\"{% endif -%}
    {%- if attr.minimum_value != None %}, ge={{attr.minimum_value}}{% endif -%}
    {%- if attr.maximum_value != None %}, le={{attr.maximum_value}}{% endif -%}
    )
    {% else -%}
    None
    {% endfor %}
{% endfor %}
"""
    ### FWD REFS / REBUILD MODEL ###
    if pydantic_ver == "1":
        template += """
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
{% for c in schema.classes.values() -%}
{{ c.name }}.update_forward_refs()
{% endfor %}
"""
    else:
        template += """
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
{% for c in schema.classes.values() -%}
{{ c.name }}.model_rebuild()
{% endfor %}
"""
    return template
class NWBPydanticGenerator(PydanticGenerator):
    """
    NWB-flavored subclass of linkml's ``PydanticGenerator``.

    Overrides the stock generator to:

    - import classes from sibling generated modules rather than re-serializing
      every class into every file (:meth:`_get_imports`),
    - emit nptyping ``NDArray[Shape[...], dtype]`` hints for classes whose
      ``is_a`` is ``Arraylike`` (:meth:`get_class_slot_range`),
    - skip enums listed in :attr:`SKIP_ENUM`.
    """

    # Enums that are handled specially elsewhere and must not be generated
    # as python Enum classes (filtered out in serialize()).
    SKIP_ENUM=('FlatDType',)

    def _get_imports(self, sv:SchemaView) -> Dict[str, List[str]]:
        """
        Compute the cross-module imports needed by this schema's classes.

        Walks the local classes' ``is_a`` parents and attribute ranges and,
        for each class defined in a *different* schema, records it under a
        relative module name derived from that schema's name.

        Args:
            sv (SchemaView): View over the schema being generated.

        Returns:
            Dict[str, List[str]]: mapping of relative module name
            (e.g. ``".core_nwb_base"``) to camelcased class names to import.
        """
        all_classes = sv.all_classes(imports=True)
        local_classes = sv.all_classes(imports=False)
        needed_classes = []
        # find needed classes - is_a and slot ranges
        for clsname, cls in local_classes.items():
            needed_classes.append(cls.is_a)
            for slot_name, slot in cls.attributes.items():
                if slot.range in all_classes:
                    needed_classes.append(slot.range)

        # dedupe; is_a may be None for root classes
        needed_classes = [cls for cls in set(needed_classes) if cls is not None]

        imports = {}
        # These classes are not generated by pydantic!
        skips = ('AnyType',)
        for cls in needed_classes:
            if cls in skips:
                continue
            # Find module that contains class
            module_name = sv.element_by_schema_map()[ElementName(cls)]
            # Don't get classes that are defined in this schema!
            if module_name == self.schema.name:
                continue
            # relative import: schema name with separators normalized to _
            local_mod_name = '.' + module_name.replace('.', '_').replace('-','_')
            if local_mod_name not in imports:
                imports[local_mod_name] = [camelcase(cls)]
            else:
                imports[local_mod_name].append(camelcase(cls))
        return imports

    def _get_classes(self, sv:SchemaView, imports: Dict[str, List[str]]) -> List[ClassDefinition]:
        """
        Collect and dependency-sort the classes to generate for this module.

        Also populates ``self.sorted_class_names`` (imported names first, then
        local ones) which :meth:`get_class_isa_plus_mixins` uses for MRO
        ordering.

        Args:
            sv (SchemaView): View over the schema being generated.
            imports (Dict[str, List[str]]): output of :meth:`_get_imports`.

        Returns:
            List[ClassDefinition]: topologically sorted local classes,
            excluding ``linkml:Any`` placeholder classes.
        """
        module_classes = sv.all_classes(imports=False).values()
        imported_classes = []
        for classes in imports.values():
            imported_classes.extend(classes)

        # pdb.set_trace()
        sorted_classes = self.sort_classes(list(module_classes), imported_classes)
        self.sorted_class_names = [camelcase(cname) for cname in imported_classes]
        self.sorted_class_names += [camelcase(c.name) for c in sorted_classes]

        # Don't want to generate classes when class_uri is linkml:Any, will
        # just swap in typing.Any instead down below
        sorted_classes = [c for c in sorted_classes if c.class_uri != "linkml:Any"]
        return sorted_classes

    def _build_class(self, class_original:ClassDefinition) -> ClassDefinition:
        """
        Copy a class definition and normalize its names for python.

        Camelcases the class name, parent, and mixins, and escapes double
        quotes in the description so it can sit inside a docstring. The
        original definition is not mutated.
        """
        class_def: ClassDefinition
        class_def = deepcopy(class_original)
        class_def.name = camelcase(class_original.name)
        if class_def.is_a:
            class_def.is_a = camelcase(class_def.is_a)
        class_def.mixins = [camelcase(p) for p in class_def.mixins]
        if class_def.description:
            class_def.description = class_def.description.replace('"', '\\"')
        return class_def

    def _check_anyof(self, s:SlotDefinition, sn: SlotDefinitionName, sv:SchemaView):
        """
        Raise if a slot declares both a concrete ``range`` and ``any_of``.

        An ``Any``-classed base range is tolerated since ``any_of`` subsumes
        it. Raises :class:`ValueError` on conflict.
        """
        # Confirm that the original slot range (ignoring the default that comes in from
        # induced_slot) isn't in addition to setting any_of
        if len(s.any_of) > 0 and sv.get_slot(sn).range is not None:
            base_range_subsumes_any_of = False
            base_range = sv.get_slot(sn).range
            base_range_cls = sv.get_class(base_range, strict=False)
            if base_range_cls is not None and base_range_cls.class_uri == "linkml:Any":
                base_range_subsumes_any_of = True
            if not base_range_subsumes_any_of:
                raise ValueError("Slot cannot have both range and any_of defined")

    def _get_numpy_slot_range(self, cls:ClassDefinition) -> str:
        """
        Render an ``Arraylike`` class as an nptyping ``NDArray`` type hint.

        Each attribute becomes one dimension: the shape token is the
        attribute's ``maximum_cardinality`` if set, else ``*`` (any size),
        and the dimension label is the attribute name. The dtype is taken
        from the *first* attribute's range — assumes all dimensions share
        one dtype (TODO confirm against schema sources).

        Falls back to ``List[range] | range`` (with a warning) when the
        range is not in ``flat_to_npytyping``.
        """
        # slot always starts with...
        prefix='NDArray['
        # and then we specify the shape:
        shape_prefix = 'Shape["'
        # using the cardinality from the attributes
        dim_pieces = []
        for attr in cls.attributes.values():
            if attr.maximum_cardinality:
                shape_part = str(attr.maximum_cardinality)
            else:
                shape_part = "*"
            # do this cheaply instead of using regex because i want to see if this works at all first...
            name_part = attr.name.replace(',', '_').replace(' ', '_').replace('__', '_')
            dim_pieces.append(' '.join([shape_part, name_part]))
        dimension = ', '.join(dim_pieces)
        shape_suffix = '"], '
        # all dimensions should be the same dtype
        try:
            dtype = flat_to_npytyping[list(cls.attributes.values())[0].range]
        except KeyError as e:
            # unknown dtype: warn and degrade to a plain list/scalar union
            warnings.warn(e)
            range = list(cls.attributes.values())[0].range
            return f'List[{range}] | {range}'
        suffix = "]"
        slot = ''.join([prefix, shape_prefix, dimension, shape_suffix, dtype, suffix])
        return slot

    def sort_classes(self, clist: List[ClassDefinition], imports:List[str]) -> List[ClassDefinition]:
        """
        sort classes such that if C is a child of P then C appears after P in the list

        Overridden method include mixin classes

        Modified from original to allow for imported classes: a parent that is
        satisfied by an imported name counts as already-present.

        Raises:
            ValueError: if no remaining class has all parents satisfied
                (i.e. an unresolvable or cyclic dependency).
        """
        clist = list(clist)
        slist = []  # sorted
        while len(clist) > 0:
            can_add = False
            for i in range(len(clist)):
                candidate = clist[i]
                can_add = False
                if candidate.is_a:
                    candidates = [candidate.is_a] + candidate.mixins
                else:
                    candidates = candidate.mixins
                if not candidates:
                    can_add = True
                else:
                    # all parents must already be sorted or imported
                    if set(candidates) <= set([p.name for p in slist] + imports):
                        can_add = True
                if can_add:
                    slist = slist + [candidate]
                    del clist[i]
                    break
            if not can_add:
                raise ValueError(
                    f"could not find suitable element in {clist} that does not ref {slist}"
                )
        return slist

    def get_class_slot_range(self, slot_range: str, inlined: bool, inlined_as_list: bool) -> str:
        """
        Monkeypatch to convert Array typed slots and classes into npytyped hints

        Classes whose ``is_a`` is ``Arraylike`` become NDArray hints via
        :meth:`_get_numpy_slot_range`; everything else defers to the parent
        generator.
        """
        sv = self.schemaview
        range_cls = sv.get_class(slot_range)
        if range_cls.is_a == "Arraylike":
            return self._get_numpy_slot_range(range_cls)
        else:
            return super().get_class_slot_range(slot_range, inlined, inlined_as_list)

    def get_class_isa_plus_mixins(self) -> Dict[str, List[str]]:
        """
        Generate the inheritance list for each class from is_a plus mixins

        Patched to only get local classes

        :return: mapping of camelcased class name to its parent list, ordered
            by ``self.sorted_class_names`` and reversed to match MRO needs.
        """
        sv = self.schemaview
        parents = {}
        for class_def in sv.all_classes(imports=False).values():
            class_parents = []
            if class_def.is_a:
                class_parents.append(camelcase(class_def.is_a))
            if self.gen_mixin_inheritance and class_def.mixins:
                class_parents.extend([camelcase(mixin) for mixin in class_def.mixins])
            if len(class_parents) > 0:
                # Use the sorted list of classes to order the parent classes, but reversed to match MRO needs
                class_parents.sort(key=lambda x: self.sorted_class_names.index(x))
                class_parents.reverse()
            parents[camelcase(class_def.name)] = class_parents
        return parents

    def serialize(self) -> str:
        """
        Render the schema to python source.

        Pipeline: load/compile the template, build a ``SchemaDefinition``
        shell, generate + filter enums, compute cross-module imports, then
        for each class rebuild its attributes from induced slots and attach
        the resolved python type hint as a ``python_range`` annotation that
        the template reads.

        Returns:
            str: generated python module source.

        Raises:
            Exception: if a slot yields no usable python range.
        """
        if self.template_file is not None:
            with open(self.template_file) as template_file:
                template_obj = Template(template_file.read())
        else:
            template_obj = Template(default_template(self.pydantic_version))

        sv: SchemaView
        sv = self.schemaview
        schema = sv.schema
        pyschema = SchemaDefinition(
            id=schema.id,
            name=schema.name,
            description=schema.description.replace('"', '\\"') if schema.description else None,
        )
        enums = self.generate_enums(sv.all_enums())
        # filter skipped enums
        enums = {k:v for k,v in enums.items() if k not in self.SKIP_ENUM}

        # import from local references, rather than serializing every class in every file
        imports = self._get_imports(sv)
        sorted_classes = self._get_classes(sv, imports)

        for class_original in sorted_classes:
            # Generate class definition
            class_def = self._build_class(class_original)
            pyschema.classes[class_def.name] = class_def

            # Not sure why this happens
            # NOTE(review): attributes are cleared and rebuilt from induced
            # slots below — presumably the copied attributes are stale
            for attribute in list(class_def.attributes.keys()):
                del class_def.attributes[attribute]

            class_name = class_original.name
            for sn in sv.class_slots(class_name):
                # TODO: fix runtime, copy should not be necessary
                s = deepcopy(sv.induced_slot(sn, class_name))
                # logging.error(f'Induced slot {class_name}.{sn} == {s.name} {s.range}')
                s.name = underscore(s.name)
                if s.description:
                    s.description = s.description.replace('"', '\\"')
                class_def.attributes[s.name] = s

                slot_ranges: List[str] = []

                self._check_anyof(s, sn, sv)

                if s.any_of is not None and len(s.any_of) > 0:
                    # list comprehension here is pulling ranges from within AnonymousSlotExpression
                    slot_ranges.extend([r.range for r in s.any_of])
                else:
                    slot_ranges.append(s.range)

                pyranges = [
                    self.generate_python_range(slot_range, s, class_def)
                    for slot_range in slot_ranges
                ]

                pyranges = list(set(pyranges))  # remove duplicates
                pyranges.sort()

                if len(pyranges) == 1:
                    pyrange = pyranges[0]
                elif len(pyranges) > 1:
                    pyrange = f"Union[{', '.join(pyranges)}]"
                else:
                    raise Exception(f"Could not generate python range for {class_name}.{s.name}")

                if s.multivalued:
                    if s.inlined or s.inlined_as_list:
                        collection_key = self.generate_collection_key(slot_ranges, s, class_def)
                    else:
                        collection_key = None
                    # inlined dicts keyed by collection_key; everything else is a list
                    if s.inlined is False or collection_key is None or s.inlined_as_list is True:
                        pyrange = f"List[{pyrange}]"
                    else:
                        pyrange = f"Dict[{collection_key}, {pyrange}]"
                if not s.required and not s.designates_type:
                    pyrange = f"Optional[{pyrange}]"
                ann = Annotation("python_range", pyrange)
                s.annotations[ann.tag] = ann

        code = template_obj.render(
            imports=imports,
            schema=pyschema,
            underscore=underscore,
            enums=enums,
            predefined_slot_values=self.get_predefined_slot_values(),
            allow_extra=self.allow_extra,
            metamodel_version=self.schema.metamodel_version,
            version=self.schema.version,
            class_isa_plus_mixins=self.get_class_isa_plus_mixins(),
        )
        return code

View file

@ -28,4 +28,32 @@ flat_to_linkml = {
}
"""
Map between the flat data types and the simpler linkml base types
"""
"""
# Map from NWB flat dtype names to nptyping type names, used when emitting
# NDArray[Shape[...], dtype] hints in the generated pydantic models.
flat_to_npytyping = {
    # floating point
    "float": "Float",
    "float32": "Float32",
    "double": "Double",
    "float64": "Float64",
    # signed integers
    "long": "LongLong",
    "int64": "Int64",
    "int": "Int",
    "int32": "Int32",
    "int16": "Int16",
    "short": "Short",
    "int8": "Int8",
    # unsigned integers
    "uint": "UInt",
    "uint32": "UInt32",
    "uint16": "UInt16",
    "uint8": "UInt8",
    "uint64": "UInt64",
    # other scalars
    "numeric": "Number",
    "text": "String",
    "utf": "Unicode",
    "utf8": "Unicode",
    "utf_8": "Unicode",
    "ascii": "String",
    "bool": "Bool",
    "isodatetime": "Datetime64",
    # catch-all for untyped data
    "AnyType": "Any",
}

View file

@ -1,411 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class ImageArray(Arraylike):
x: float = Field(...)
y: float = Field(...)
r_g_b: Optional[float] = Field(None)
r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
num_images: Image = Field(...)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class NWBData(Data):
"""
An abstract data type for a dataset.
"""
None
class Image(NWBData):
"""
An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
"""
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
"""
Ordered dataset of references to Image objects.
"""
array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
"""
Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
"""
array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    # Link to the VectorData column this index segments into ragged rows.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    # 1-D payload (num_elements) per ElementIdentifiersArray.
    array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    # Object reference to the table whose rows this region selects.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    # 1-D payload (num_rows) per DynamicTableIdArray.
    array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Generator emits a bare `None` statement as the empty-body placeholder.
    None
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # No slots of its own; bare `None` is the generator's empty-body placeholder.
    None
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # No slots of its own; bare `None` is the generator's empty-body placeholder.
    None
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # `data` is the only required slot on this type.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    # Either starting_time (+rate) or explicit timestamps is expected to anchor samples
    # in time — presumably enforced elsewhere, not by this model. TODO confirm.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # Field names intentionally mirror the contained neurodata type names
    # (generator convention for unnamed typed children); they shadow the
    # module-level classes of the same name only at class-attribute scope.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    # Required, possibly-empty list of contained Image datasets; field name mirrors the type.
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # NOTE(review): the NWB attribute is an array of text; generated here as a
    # single str — presumably a generator limitation. Verify against schema.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # `id` is the only required column.
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # One sub-table per category; field name mirrors the contained type.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    # Re-declarations of the inherited DynamicTable slots (generator flattens them).
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Field names mirror the contained base types (Data / Container).
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs: with `from __future__ import annotations` every field
# annotation is a string, so each model must resolve its forward references
# against the module namespace once all classes exist.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
_FORWARD_REF_MODELS = (
    TimeSeriesData,
    TimeSeriesStartingTime,
    TimeSeriesTimestamps,
    TimeSeriesControl,
    TimeSeriesControlDescription,
    TimeSeriesSync,
    Arraylike,
    ImageArray,
    ImageReferencesArray,
    TimeSeriesDataArray,
    TimeSeriesTimestampsArray,
    TimeSeriesControlArray,
    TimeSeriesControlDescriptionArray,
    VectorDataArray,
    VectorIndexArray,
    ElementIdentifiersArray,
    DynamicTableRegionArray,
    DynamicTableIdArray,
    Data,
    NWBData,
    Image,
    ImageReferences,
    ImagesOrderOfImages,
    VectorData,
    TimeSeriesReferenceVectorData,
    VectorIndex,
    ElementIdentifiers,
    DynamicTableRegion,
    DynamicTableId,
    Container,
    NWBContainer,
    NWBDataInterface,
    TimeSeries,
    ProcessingModule,
    Images,
    DynamicTable,
    AlignedDynamicTable,
    SimpleMultiContainer,
)
for _model in _FORWARD_REF_MODELS:
    _model.update_forward_refs()

View file

@ -1,929 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class SpatialSeriesData(ConfiguredBaseModel):
"""
1-D or 2-D array storing position or direction relative to some reference frame.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
array: Optional[SpatialSeriesDataArray] = Field(None)
class SpatialSeriesReferenceFrame(ConfiguredBaseModel):
"""
Description defining what exactly 'straight-ahead' means.
"""
None
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class SpatialSeriesDataArray(Arraylike):
num_times: float = Field(...)
x: Optional[float] = Field(None)
xy: Optional[float] = Field(None)
xyz: Optional[float] = Field(None)
class AbstractFeatureSeriesData(ConfiguredBaseModel):
"""
Values of each feature at each time.
"""
unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""")
array: Optional[AbstractFeatureSeriesDataArray] = Field(None)
class AbstractFeatureSeriesDataArray(Arraylike):
num_times: float = Field(...)
num_features: Optional[float] = Field(None)
class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel):
"""
Units of each feature.
"""
array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(None)
class AbstractFeatureSeriesFeatureUnitsArray(Arraylike):
num_features: str = Field(...)
class AbstractFeatureSeriesFeatures(ConfiguredBaseModel):
"""
Description of the features represented in TimeSeries::data.
"""
array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(None)
class AbstractFeatureSeriesFeaturesArray(Arraylike):
num_features: str = Field(...)
class AnnotationSeriesData(ConfiguredBaseModel):
"""
Annotations made during an experiment.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[AnnotationSeriesDataArray] = Field(None)
class AnnotationSeriesDataArray(Arraylike):
num_times: str = Field(...)
class IntervalSeriesData(ConfiguredBaseModel):
"""
Use values >0 if interval started, <0 if interval ended.
"""
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
array: Optional[IntervalSeriesDataArray] = Field(None)
class IntervalSeriesDataArray(Arraylike):
num_times: int = Field(...)
class DecompositionSeriesData(ConfiguredBaseModel):
"""
Data decomposed into frequency bands.
"""
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""")
array: Optional[DecompositionSeriesDataArray] = Field(None)
class DecompositionSeriesDataArray(Arraylike):
num_times: Optional[float] = Field(None)
num_channels: Optional[float] = Field(None)
num_bands: Optional[float] = Field(None)
class DecompositionSeriesMetric(ConfiguredBaseModel):
"""
The metric used, e.g. phase, amplitude, power.
"""
None
class DecompositionSeriesBandsBandLimitsArray(Arraylike):
num_bands: Optional[float] = Field(None)
low_high: Optional[float] = Field(None)
class DecompositionSeriesBandsBandMeanArray(Arraylike):
num_bands: float = Field(...)
class DecompositionSeriesBandsBandStdevArray(Arraylike):
num_bands: float = Field(...)
class UnitsObsIntervalsArray(Arraylike):
    """Dimension slots for the Units.obs_intervals dataset (num_intervals x start|end)."""
    num_intervals: Optional[float] = Field(None)
    # BUG FIX: the NWB dimension is named "start|end", which is not a valid
    # Python identifier — `start|end: Optional[float] = ...` is a SyntaxError,
    # so this module could not even be imported. Expose the slot under a
    # sanitized name and keep the original schema name as the pydantic alias
    # so (de)serialization still uses "start|end".
    start_end: Optional[float] = Field(None, alias="start|end")
class UnitsWaveformMeanArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformSdArray(Arraylike):
num_units: float = Field(...)
num_samples: float = Field(...)
num_electrodes: Optional[float] = Field(None)
class UnitsWaveformsArray(Arraylike):
num_waveforms: Optional[float] = Field(None)
num_samples: Optional[float] = Field(None)
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandName(VectorData):
"""
Name of the band, e.g. theta.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class DecompositionSeriesBandsBandLimits(VectorData):
"""
Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.
"""
array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandMean(VectorData):
"""
The mean Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandStdev(VectorData):
"""
The standard deviation of Gaussian filters, in Hz.
"""
array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimes(VectorData):
"""
Spike times for each unit in seconds.
"""
resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsObsIntervals(VectorData):
"""
Observation intervals for each unit.
"""
array: Optional[UnitsObsIntervalsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodeGroup(VectorData):
"""
Electrode group that each spike unit came from.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class UnitsWaveformMean(VectorData):
"""
Spike waveform mean for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformMeanArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformSd(VectorData):
"""
Spike waveform standard deviation for each spike unit.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformSdArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveforms(VectorData):
"""
Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
"""
sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
array: Optional[UnitsWaveformsArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimesIndex(VectorIndex):
"""
Index into the spike_times dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsObsIntervalsIndex(VectorIndex):
"""
Index into the obs_intervals dataset.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsElectrodesIndex(VectorIndex):
"""
Index into electrodes.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndex(VectorIndex):
"""
Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndexIndex(VectorIndex):
"""
Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DecompositionSeriesSourceChannels(DynamicTableRegion):
"""
DynamicTableRegion pointer to the channels that this decomposition series was generated from.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class UnitsElectrodes(DynamicTableRegion):
"""
Electrode that each spike unit came from, specified using a DynamicTableRegion.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DecompositionSeriesBands(DynamicTable):
"""
Table for describing the bands that this series was generated from. There should be one row in this table for each band.
"""
band_name: DecompositionSeriesBandsBandName = Field(..., description="""Name of the band, e.g. theta.""")
band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""")
band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""")
band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class Units(DynamicTable):
    """
    Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
    """
    # Ragged columns come in (data, index) pairs: each *_index field indexes its
    # companion column so that rows of different lengths can share one flat dataset.
    spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
    spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
    obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
    obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
    electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
    electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
    electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""")
    waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
    waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
    # 'waveforms' is doubly ragged: waveforms_index_index -> waveforms_index -> waveforms.
    waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
    waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
    waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
    # NOTE(review): DynamicTable base fields re-declared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    # NOTE(review): declared as a single str though it names multiple categories —
    # presumably the generator flattens a string array here; confirm against schema.
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # Field name intentionally shadows the DynamicTable class name (generator convention
    # for untyped child groups): the contained category sub-tables.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Field names shadow their element class names (generator convention for
    # unnamed child objects grouped by type).
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
    """
    An abstract data type for a dataset.

    Marker base class: adds no fields beyond those inherited from Data.
    """
    # The generator previously emitted a bare `None` expression statement here as a
    # body placeholder; the docstring alone is a valid class body, so it was removed.
class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Un-nested array payload of the column (see VectorDataArray).
    array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    # Pixel data, un-nested from the dataset into a dedicated Arraylike model.
    array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
    """
    Array shape/dtype companion for Image: required x and y dimensions, with an
    optional third dimension of 3 (RGB) or 4 (RGBA) channels.
    """
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    # 1-D array of Image references (see ImageReferencesArray).
    array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
    """
    Array companion for ImageReferences: one Image reference per element along
    the num_images dimension.
    """
    num_images: Image = Field(...)
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.

    Marker base class: adds no fields beyond those inherited from Container.
    """
    # The generator previously emitted a bare `None` expression statement here as a
    # body placeholder; the docstring alone is a valid class body, so it was removed.
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.

    Marker base class: adds no fields beyond those inherited from NWBContainer.
    """
    # The generator previously emitted a bare `None` expression statement here as a
    # body placeholder; the docstring alone is a valid class body, so it was removed.
class BehavioralEpochs(NWBDataInterface):
    """
    TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data.
    """
    # Field name shadows the IntervalSeries class name (generator convention).
    IntervalSeries: Optional[List[IntervalSeries]] = Field(default_factory=list, description="""IntervalSeries object containing start and stop times of epochs.""")
class BehavioralEvents(NWBDataInterface):
    """
    TimeSeries for storing behavioral events. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
    """
    # Field name shadows the TimeSeries class name (generator convention).
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing behavioral events.""")
class BehavioralTimeSeries(NWBDataInterface):
    """
    TimeSeries for storing Behavoioral time series data. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
    """
    # Field name shadows the TimeSeries class name (generator convention).
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing continuous behavioral data.""")
class PupilTracking(NWBDataInterface):
    """
    Eye-tracking data, representing pupil size.
    """
    # NOTE(review): annotated as required (List, not Optional) yet given a
    # default_factory — effectively optional at construction; confirm intent.
    TimeSeries: List[TimeSeries] = Field(default_factory=list, description="""TimeSeries object containing time series data on pupil size.""")
class EyeTracking(NWBDataInterface):
    """
    Eye-tracking data, representing direction of gaze.
    """
    # Field name shadows the SpatialSeries class name (generator convention).
    SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing data measuring direction of gaze.""")
class CompassDirection(NWBDataInterface):
    """
    With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees.
    """
    # Field name shadows the SpatialSeries class name (generator convention).
    SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing direction of gaze travel.""")
class Position(NWBDataInterface):
    """
    Position data, whether along the x, x/y or x/y/z axis.
    """
    # NOTE(review): annotated as required (List, not Optional) yet given a
    # default_factory — effectively optional at construction; confirm intent.
    SpatialSeries: List[SpatialSeries] = Field(default_factory=list, description="""SpatialSeries object containing position data.""")
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # Time is always the first dimension of data.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    # Either starting_time (+rate) or explicit timestamps locates samples in time.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class SpatialSeries(TimeSeries):
    """
    Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.
    """
    # Narrows the base TimeSeries 'data' to the SpatialSeries-specific payload type.
    data: SpatialSeriesData = Field(..., description="""1-D or 2-D array storing position or direction relative to some reference frame.""")
    reference_frame: Optional[SpatialSeriesReferenceFrame] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""")
    # Remaining fields re-declare the TimeSeries base fields (generator flattening).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AbstractFeatureSeries(TimeSeries):
    """
    Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
    """
    # Narrows the base TimeSeries 'data' and adds the feature descriptors.
    data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""")
    feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""")
    features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""")
    # Remaining fields re-declare the TimeSeries base fields (generator flattening).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AnnotationSeries(TimeSeries):
    """
    Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
    """
    # Narrows the base TimeSeries 'data' to text annotations.
    data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""")
    # Remaining fields re-declare the TimeSeries base fields (generator flattening).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntervalSeries(TimeSeries):
    """
    Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
    """
    # Narrows the base TimeSeries 'data' to signed interval start/stop markers.
    data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""")
    # Remaining fields re-declare the TimeSeries base fields (generator flattening).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DecompositionSeries(TimeSeries):
    """
    Spectral analysis of a time series, e.g. of an LFP or a speech signal.
    """
    # Narrows the base TimeSeries 'data' and adds decomposition metadata.
    data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""")
    metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
    source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""")
    # Required sub-table describing the frequency bands (see DecompositionSeriesBands).
    bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""")
    # Remaining fields re-declare the TimeSeries base fields (generator flattening).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    # Scalar attributes of the data dataset, un-nested into model fields.
    # Stored values map to physical units as: value * conversion + offset -> unit.
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    # The actual n-dimensional payload (see TimeSeriesDataArray).
    array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
    """
    Array companion for TimeSeriesData: required time dimension plus up to three
    optional trailing dimensions of any element type.
    """
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    # Fixed to 'seconds' by the spec; kept as a plain str field here.
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    # Fixed-value attributes per the spec; not enforced by this model.
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
    """
    Array companion for TimeSeriesTimestamps: one float timestamp per time point.
    """
    num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    # One integer label per time point (see TimeSeriesControlArray).
    array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
    """
    Array companion for TimeSeriesControl: one int label per time point.
    """
    num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    # One text description per distinct control value.
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
    """
    Array companion for TimeSeriesControlDescription: one str per control value.
    """
    num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.

    Open group: the schema defines no fixed contents, so this model declares no fields.
    """
    # The generator previously emitted a bare `None` expression statement here as a
    # body placeholder; the docstring alone is a valid class body, so it was removed.
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # Field names shadow their element class names (generator convention for
    # unnamed child objects grouped by type).
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    # Field name mirrors the contained type name (generator convention).
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")


class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    # Inherits its contract from ImageReferences; only the array payload is declared.
    array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Every annotation in this generated module is a postponed string; resolve them
# all now that the full set of model classes exists.
_GENERATED_MODELS = (
    SpatialSeriesData, SpatialSeriesReferenceFrame, Arraylike, SpatialSeriesDataArray,
    AbstractFeatureSeriesData, AbstractFeatureSeriesDataArray, AbstractFeatureSeriesFeatureUnits,
    AbstractFeatureSeriesFeatureUnitsArray, AbstractFeatureSeriesFeatures,
    AbstractFeatureSeriesFeaturesArray, AnnotationSeriesData, AnnotationSeriesDataArray,
    IntervalSeriesData, IntervalSeriesDataArray, DecompositionSeriesData,
    DecompositionSeriesDataArray, DecompositionSeriesMetric,
    DecompositionSeriesBandsBandLimitsArray, DecompositionSeriesBandsBandMeanArray,
    DecompositionSeriesBandsBandStdevArray, UnitsObsIntervalsArray, UnitsWaveformMeanArray,
    UnitsWaveformSdArray, UnitsWaveformsArray, VectorDataArray, VectorIndexArray,
    ElementIdentifiersArray, DynamicTableRegionArray, DynamicTableIdArray, Data, VectorData,
    DecompositionSeriesBandsBandName, DecompositionSeriesBandsBandLimits,
    DecompositionSeriesBandsBandMean, DecompositionSeriesBandsBandStdev, UnitsSpikeTimes,
    UnitsObsIntervals, UnitsElectrodeGroup, UnitsWaveformMean, UnitsWaveformSd, UnitsWaveforms,
    VectorIndex, UnitsSpikeTimesIndex, UnitsObsIntervalsIndex, UnitsElectrodesIndex,
    UnitsWaveformsIndex, UnitsWaveformsIndexIndex, ElementIdentifiers, DynamicTableRegion,
    DecompositionSeriesSourceChannels, UnitsElectrodes, DynamicTableId, Container, DynamicTable,
    DecompositionSeriesBands, Units, AlignedDynamicTable, SimpleMultiContainer, NWBData,
    TimeSeriesReferenceVectorData, Image, ImageArray, ImageReferences, ImageReferencesArray,
    NWBContainer, NWBDataInterface, BehavioralEpochs, BehavioralEvents, BehavioralTimeSeries,
    PupilTracking, EyeTracking, CompassDirection, Position, TimeSeries, SpatialSeries,
    AbstractFeatureSeries, AnnotationSeries, IntervalSeries, DecompositionSeries, TimeSeriesData,
    TimeSeriesDataArray, TimeSeriesStartingTime, TimeSeriesTimestamps, TimeSeriesTimestampsArray,
    TimeSeriesControl, TimeSeriesControlArray, TimeSeriesControlDescription,
    TimeSeriesControlDescriptionArray, TimeSeriesSync, ProcessingModule, Images,
    ImagesOrderOfImages,
)
for _model in _GENERATED_MODELS:
    _model.update_forward_refs()

View file

@ -1,420 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Version strings emitted by the generator; note these are the literal string
# "None", not the None singleton.
metamodel_version = "None"
version = "None"


class WeakRefShimBaseModel(BaseModel):
    # Adds a __weakref__ slot so generated model instances can be weakly
    # referenced (pydantic v1 models define __slots__ without it).
    __slots__ = '__weakref__'


class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Common base for every generated model; pydantic config is supplied as
    # class keyword arguments.
    pass
class FlatDType(str, Enum):
    # Flat (scalar) dtype names; each value is the spec-level dtype string.
    # Member names like `float`/`int`/`bool` shadow builtins only inside this
    # Enum namespace, which is harmless.
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Abstract base: subclasses declare one field per possible dimension.
    # NOTE(review): the bare `None` below is a no-op expression left by the generator.
    None


class ImageArray(Arraylike):
    # 2-D (x, y) required; optional channel dimension for r_g_b or r_g_b_a.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)


class ImageReferencesArray(Arraylike):
    # 1-D sequence of Image references, dimension num_images.
    num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    # Scalar dataset attributes un-nested onto the model; the array payload
    # itself lives in `array` below.
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    array: Optional[TimeSeriesDataArray] = Field(None)


class TimeSeriesDataArray(Arraylike):
    # 1- to 4-D; only the time dimension is required, dtype is unconstrained (Any).
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)


class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[TimeSeriesTimestampsArray] = Field(None)


class TimeSeriesTimestampsArray(Arraylike):
    # 1-D: a single required float dimension named num_times.
    num_times: float = Field(...)


class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    array: Optional[TimeSeriesControlArray] = Field(None)


class TimeSeriesControlArray(Arraylike):
    # 1-D: a single required int dimension named num_times.
    num_times: int = Field(...)


class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)


class TimeSeriesControlDescriptionArray(Arraylike):
    # 1-D: a single required str dimension named num_control_values.
    num_control_values: str = Field(...)


class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # NOTE(review): bare `None` is a no-op expression left by the generator.
    None
class VectorDataArray(Arraylike):
    # Up to 4-D; only the first dimension is required, dtype unconstrained (Any).
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)


class VectorIndexArray(Arraylike):
    # 1-D int index, one entry per table row.
    num_rows: int = Field(...)


class ElementIdentifiersArray(Arraylike):
    # 1-D int identifiers, one entry per element.
    num_elements: int = Field(...)


class DynamicTableRegionArray(Arraylike):
    # 1-D int row references into the target DynamicTable.
    num_rows: int = Field(...)


class DynamicTableIdArray(Arraylike):
    # 1-D int row ids.
    num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # NOTE(review): bare `None` is a no-op expression left by the generator.
    None


class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    None


class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    array: Optional[ImageArray] = Field(None)


class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    array: Optional[ImageReferencesArray] = Field(None)


class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)


class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)


class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    # `target` is a reference to another column, not an inline copy of it.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")


class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    array: Optional[ElementIdentifiersArray] = Field(None)


class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)


class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # NOTE(review): bare `None` is a no-op expression left by the generator.
    None


class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    None


class Device(NWBContainer):
    """
    Metadata about a data acquisition device, e.g., recording system, electrode, microscope.
    """
    description: Optional[str] = Field(None, description="""Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.""")
    manufacturer: Optional[str] = Field(None, description="""The name of the manufacturer of the device.""")


class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    None
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # `data` is the only required sub-dataset; timing may come from either
    # `starting_time` (+rate) or explicit `timestamps`.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # NOTE(review): field names mirror the contained type names (generator
    # convention); the postponed annotations resolve to the module-level classes.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")


class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # `id` is the only required dataset of a DynamicTable.
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")


class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    # NOTE(review): the following fields re-declare the inherited DynamicTable
    # fields verbatim (generator behavior), which is redundant but harmless.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")


class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# All annotations in this generated module are postponed strings; resolve them
# now that every model class has been defined.
_GENERATED_MODELS = (
    Arraylike, ImageArray, ImageReferencesArray, TimeSeriesData, TimeSeriesDataArray,
    TimeSeriesStartingTime, TimeSeriesTimestamps, TimeSeriesTimestampsArray, TimeSeriesControl,
    TimeSeriesControlArray, TimeSeriesControlDescription, TimeSeriesControlDescriptionArray,
    TimeSeriesSync, VectorDataArray, VectorIndexArray, ElementIdentifiersArray,
    DynamicTableRegionArray, DynamicTableIdArray, Data, NWBData, Image, ImageReferences,
    ImagesOrderOfImages, VectorData, TimeSeriesReferenceVectorData, VectorIndex,
    ElementIdentifiers, DynamicTableRegion, DynamicTableId, Container, NWBContainer, Device,
    NWBDataInterface, TimeSeries, ProcessingModule, Images, DynamicTable, AlignedDynamicTable,
    SimpleMultiContainer,
)
for _model in _GENERATED_MODELS:
    _model.update_forward_refs()

View file

@ -1,785 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Version strings emitted by the generator; note these are the literal string
# "None", not the None singleton.
metamodel_version = "None"
version = "None"


class WeakRefShimBaseModel(BaseModel):
    # Adds a __weakref__ slot so generated model instances can be weakly
    # referenced (pydantic v1 models define __slots__ without it).
    __slots__ = '__weakref__'


class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Common base for every generated model; pydantic config is supplied as
    # class keyword arguments.
    pass
class FlatDType(str, Enum):
    # Flat (scalar) dtype names; each value is the spec-level dtype string.
    # Member names like `float`/`int`/`bool` shadow builtins only inside this
    # Enum namespace, which is harmless.
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class ElectricalSeriesData(ConfiguredBaseModel):
    """
    Recorded voltage data.
    """
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""")
    array: Optional[ElectricalSeriesDataArray] = Field(None)


class ElectricalSeriesChannelConversion(ConfiguredBaseModel):
    """
    Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.
    """
    axis: Optional[int] = Field(None, description="""The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1.""")
    array: Optional[ElectricalSeriesChannelConversionArray] = Field(None)


class SpikeEventSeriesData(ConfiguredBaseModel):
    """
    Spike waveforms.
    """
    unit: Optional[str] = Field(None, description="""Unit of measurement for waveforms, which is fixed to 'volts'.""")
    array: Optional[SpikeEventSeriesDataArray] = Field(None)


class SpikeEventSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.
    """
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[SpikeEventSeriesTimestampsArray] = Field(None)
class FeatureExtractionDescription(ConfiguredBaseModel):
    """
    Description of features (eg, ''PC1'') for each of the extracted features.
    """
    array: Optional[FeatureExtractionDescriptionArray] = Field(None)


class FeatureExtractionFeatures(ConfiguredBaseModel):
    """
    Multi-dimensional array of features extracted from each event.
    """
    array: Optional[FeatureExtractionFeaturesArray] = Field(None)


class FeatureExtractionTimes(ConfiguredBaseModel):
    """
    Times of events that features correspond to (can be a link).
    """
    array: Optional[FeatureExtractionTimesArray] = Field(None)
class EventDetectionDetectionMethod(ConfiguredBaseModel):
    """
    Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.
    """
    # NOTE(review): bare `None` is a no-op expression left by the generator.
    None


class EventDetectionSourceIdx(ConfiguredBaseModel):
    """
    Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.
    """
    array: Optional[EventDetectionSourceIdxArray] = Field(None)


class EventDetectionTimes(ConfiguredBaseModel):
    """
    Timestamps of events, in seconds.
    """
    unit: Optional[str] = Field(None, description="""Unit of measurement for event times, which is fixed to 'seconds'.""")
    array: Optional[EventDetectionTimesArray] = Field(None)
class ElectrodeGroupPosition(ConfiguredBaseModel):
    """
    stereotaxic or common framework coordinates
    """
    # NOTE(review): bare `None` is a no-op expression left by the generator.
    None


class ClusterWaveformsWaveformFiltering(ConfiguredBaseModel):
    """
    Filtering applied to data before generating mean/sd
    """
    None


class ClusterWaveformsWaveformMean(ConfiguredBaseModel):
    """
    The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)
    """
    array: Optional[ClusterWaveformsWaveformMeanArray] = Field(None)


class ClusterWaveformsWaveformSd(ConfiguredBaseModel):
    """
    Stdev of waveforms for each cluster, using the same indices as in mean
    """
    array: Optional[ClusterWaveformsWaveformSdArray] = Field(None)
class ClusteringDescription(ConfiguredBaseModel):
"""
Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)
"""
None
class ClusteringNum(ConfiguredBaseModel):
"""
Cluster number of each event
"""
array: Optional[ClusteringNumArray] = Field(None)
class ClusteringPeakOverRms(ConfiguredBaseModel):
"""
Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).
"""
array: Optional[ClusteringPeakOverRmsArray] = Field(None)
class ClusteringTimes(ConfiguredBaseModel):
"""
Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.
"""
array: Optional[ClusteringTimesArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Abstract marker base: subclasses (the generated *Array classes) declare
    # one field per possible dimension. Required fields (`Field(...)`) are
    # dimensions present in every shape variant of the original schema;
    # Optional fields appear only in some variants.
    None
# Generated shape specifications (see Arraylike): each field name is a
# dimension label from the NWB schema's `dims`, the annotation is the dataset
# dtype, and required vs Optional encodes which dimensions every allowed shape
# must have.
class ElectricalSeriesDataArray(Arraylike):
    num_times: float = Field(...)
    num_channels: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)
class ElectricalSeriesChannelConversionArray(Arraylike):
    num_channels: float = Field(...)
class SpikeEventSeriesDataArray(Arraylike):
    num_events: float = Field(...)
    num_samples: float = Field(...)
    num_channels: Optional[float] = Field(None)
class SpikeEventSeriesTimestampsArray(Arraylike):
    num_times: float = Field(...)
class FeatureExtractionDescriptionArray(Arraylike):
    # str dtype: this array holds per-feature description text, not numbers.
    num_features: str = Field(...)
class FeatureExtractionFeaturesArray(Arraylike):
    num_events: Optional[float] = Field(None)
    num_channels: Optional[float] = Field(None)
    num_features: Optional[float] = Field(None)
class FeatureExtractionTimesArray(Arraylike):
    num_events: float = Field(...)
class EventDetectionSourceIdxArray(Arraylike):
    num_events: int = Field(...)
class EventDetectionTimesArray(Arraylike):
    num_events: float = Field(...)
class ClusterWaveformsWaveformMeanArray(Arraylike):
    num_clusters: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)
class ClusterWaveformsWaveformSdArray(Arraylike):
    num_clusters: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)
class ClusteringNumArray(Arraylike):
    num_events: int = Field(...)
class ClusteringPeakOverRmsArray(Arraylike):
    num_clusters: float = Field(...)
class ClusteringTimesArray(Arraylike):
    num_events: float = Field(...)
class VectorDataArray(Arraylike):
    # VectorData may be 1-4 dimensional with any dtype; only the first
    # dimension is required.
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
    num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
    num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
    num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
    num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Abstract root of the hdmf-common dataset hierarchy; no slots of its own.
    None
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    # Reference to the indexed column; presumably serialized as an object
    # reference rather than an embedded copy — TODO confirm against the i/o layer.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    # Forward reference: DynamicTable is defined later in this file.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
# Generated per-parent subclasses of DynamicTableRegion/ElementIdentifiers.
# Their slots restate the parents' slots verbatim (generator behavior), so the
# only distinction from the base classes is the type name itself.
class ElectricalSeriesElectrodes(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the electrodes that this time series was generated from.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
class FeatureExtractionElectrodes(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the electrodes that this time series was generated from.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Abstract root of the hdmf-common group hierarchy; no slots of its own.
    None
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # `id` is the only required dataset of a DynamicTable (note: shadows the
    # `id` builtin at class scope, which is harmless for a pydantic field).
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # Generated convention: a field named after a neurodata type holds the
    # list of anonymous children of that type.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # NWB-core specialization of the hdmf Data base; no slots of its own.
    None
class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # NOTE(review): the compound (idx_start, count, timeseries) dtype described
    # above is not modeled here — the slot is the generic VectorDataArray.
    array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
    # x and y are required; the optional third dimension is either RGB (3) or
    # RGBA (4), modeled as two mutually-alternative optional slots.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
    # Element dtype is a reference to Image objects, so the slot type is the
    # Image model itself rather than a scalar.
    num_images: Image = Field(...)
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    None
class ElectrodeGroup(NWBContainer):
    """
    A physical grouping of electrodes, e.g. a shank of an array.
    """
    description: Optional[str] = Field(None, description="""Description of this electrode group.""")
    location: Optional[str] = Field(None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""")
    position: Optional[ElectrodeGroupPosition] = Field(None, description="""stereotaxic or common framework coordinates""")
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # Abstract base for the processing-module interfaces below; no slots.
    None
# Extracellular-ephys NWBDataInterface types. Required sub-datasets use
# Field(...); fields named after a neurodata type (e.g. SpikeEventSeries,
# ElectricalSeries) collect the anonymous child objects of that type.
class FeatureExtraction(NWBDataInterface):
    """
    Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
    """
    description: FeatureExtractionDescription = Field(..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""")
    features: FeatureExtractionFeatures = Field(..., description="""Multi-dimensional array of features extracted from each event.""")
    times: FeatureExtractionTimes = Field(..., description="""Times of events that features correspond to (can be a link).""")
    electrodes: FeatureExtractionElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
class EventDetection(NWBDataInterface):
    """
    Detected spike events from voltage trace(s).
    """
    detection_method: EventDetectionDetectionMethod = Field(..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""")
    source_idx: EventDetectionSourceIdx = Field(..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""")
    times: EventDetectionTimes = Field(..., description="""Timestamps of events, in seconds.""")
class EventWaveform(NWBDataInterface):
    """
    Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition.
    """
    SpikeEventSeries: Optional[List[SpikeEventSeries]] = Field(default_factory=list, description="""SpikeEventSeries object(s) containing detected spike event waveforms.""")
class FilteredEphys(NWBDataInterface):
    """
    Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
    """
    # Required (non-Optional annotation) but still defaulted to an empty list
    # by the generator — NOTE(review): an empty list will satisfy validation.
    ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing filtered electrophysiology data.""")
class LFP(NWBDataInterface):
    """
    LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
    """
    ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing LFP data for one or more channels.""")
class ClusterWaveforms(NWBDataInterface):
    """
    DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
    """
    waveform_filtering: ClusterWaveformsWaveformFiltering = Field(..., description="""Filtering applied to data before generating mean/sd""")
    waveform_mean: ClusterWaveformsWaveformMean = Field(..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""")
    waveform_sd: ClusterWaveformsWaveformSd = Field(..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""")
class Clustering(NWBDataInterface):
    """
    DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting.
    """
    description: ClusteringDescription = Field(..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""")
    num: ClusteringNum = Field(..., description="""Cluster number of each event""")
    peak_over_rms: ClusteringPeakOverRms = Field(..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""")
    times: ClusteringTimes = Field(..., description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""")
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # `data` is the only required dataset; the TimeSeries* wrapper classes it
    # and the fields below reference are defined later in this file.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    # Either starting_time (+ rate) or timestamps locates samples in time;
    # NOTE(review): that either/or constraint is not enforced by this model.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
# The generator restates inherited slots on each subclass, which is why the
# TimeSeries fields reappear verbatim below.
class ElectricalSeries(TimeSeries):
    """
    A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels.
    """
    filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
    # `data` is narrowed from TimeSeriesData to the ephys-specific wrapper.
    data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
    electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
    channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class SpikeEventSeries(ElectricalSeries):
    """
    Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
    """
    data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
    # Unlike the base TimeSeries, timestamps are mandatory here (event times).
    timestamps: SpikeEventSeriesTimestamps = Field(..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""")
    filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
    electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
    channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    # Scalar attributes promoted from the dataset; the values themselves live
    # in the nested `array` wrapper.
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
    """1-D float array of timestamps, one per time sample."""
    num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    array: Optional[TimeSeriesControlArray] = Field(None)


class TimeSeriesControlArray(Arraylike):
    """1-D int array of control labels, one per time sample."""
    num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)


class TimeSeriesControlDescriptionArray(Arraylike):
    """1-D string array, one description per distinct control value."""
    num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # Generator placeholder: the schema declares no slots for this group.
    None
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # NOTE(generated): field names intentionally shadow their element types;
    # resolved lazily via `from __future__ import annotations`.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    # Field name shadows the Image type (generated convention).
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Required because `from __future__ import annotations` makes every field
# annotation a string; pydantic v1 must resolve them once all models exist.
ElectricalSeriesData.update_forward_refs()
ElectricalSeriesChannelConversion.update_forward_refs()
SpikeEventSeriesData.update_forward_refs()
SpikeEventSeriesTimestamps.update_forward_refs()
FeatureExtractionDescription.update_forward_refs()
FeatureExtractionFeatures.update_forward_refs()
FeatureExtractionTimes.update_forward_refs()
EventDetectionDetectionMethod.update_forward_refs()
EventDetectionSourceIdx.update_forward_refs()
EventDetectionTimes.update_forward_refs()
ElectrodeGroupPosition.update_forward_refs()
ClusterWaveformsWaveformFiltering.update_forward_refs()
ClusterWaveformsWaveformMean.update_forward_refs()
ClusterWaveformsWaveformSd.update_forward_refs()
ClusteringDescription.update_forward_refs()
ClusteringNum.update_forward_refs()
ClusteringPeakOverRms.update_forward_refs()
ClusteringTimes.update_forward_refs()
Arraylike.update_forward_refs()
ElectricalSeriesDataArray.update_forward_refs()
ElectricalSeriesChannelConversionArray.update_forward_refs()
SpikeEventSeriesDataArray.update_forward_refs()
SpikeEventSeriesTimestampsArray.update_forward_refs()
FeatureExtractionDescriptionArray.update_forward_refs()
FeatureExtractionFeaturesArray.update_forward_refs()
FeatureExtractionTimesArray.update_forward_refs()
EventDetectionSourceIdxArray.update_forward_refs()
EventDetectionTimesArray.update_forward_refs()
ClusterWaveformsWaveformMeanArray.update_forward_refs()
ClusterWaveformsWaveformSdArray.update_forward_refs()
ClusteringNumArray.update_forward_refs()
ClusteringPeakOverRmsArray.update_forward_refs()
ClusteringTimesArray.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
ElectricalSeriesElectrodes.update_forward_refs()
FeatureExtractionElectrodes.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
NWBData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
Image.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferences.update_forward_refs()
ImageReferencesArray.update_forward_refs()
NWBContainer.update_forward_refs()
ElectrodeGroup.update_forward_refs()
NWBDataInterface.update_forward_refs()
FeatureExtraction.update_forward_refs()
EventDetection.update_forward_refs()
EventWaveform.update_forward_refs()
FilteredEphys.update_forward_refs()
LFP.update_forward_refs()
ClusterWaveforms.update_forward_refs()
Clustering.update_forward_refs()
TimeSeries.update_forward_refs()
ElectricalSeries.update_forward_refs()
SpikeEventSeries.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()

View file

@ -1,484 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys

# `Literal` moved into `typing` in Python 3.8; fall back to
# typing_extensions on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

# Versions stamped by the generator; the string "None" means unversioned.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim that makes pydantic v1 models weak-referenceable (v1's BaseModel
    defines __slots__ without __weakref__)."""
    __slots__ = '__weakref__'


class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """Shared base for all generated models: strict validation on assignment,
    unknown fields forbidden, enum members serialized by value."""
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtypes allowed by the NWB/HDMF schema language."""
    # NOTE(generated): member names intentionally mirror schema dtype tokens,
    # including names that shadow builtins (float, int, bool, ...).
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Generator placeholder: abstract base, no slots of its own.
    None
class ImageArray(Arraylike):
    """Shape specifier for Image data: (x, y), or (x, y, 3|4) for RGB / RGBA."""
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)


class ImageReferencesArray(Arraylike):
    """1-D array of references to Image objects."""
    num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
"""
Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
"""
None
class VectorDataArray(Arraylike):
    """Shape specifier for VectorData: up to 4 dimensions, only the first required."""
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)


class VectorIndexArray(Arraylike):
    """1-D index array, one entry per DynamicTable row."""
    num_rows: int = Field(...)


class ElementIdentifiersArray(Arraylike):
    """1-D array of unique element identifiers."""
    num_elements: int = Field(...)


class DynamicTableRegionArray(Arraylike):
    """1-D array of row indices into a target DynamicTable."""
    num_rows: int = Field(...)


class DynamicTableIdArray(Arraylike):
    """1-D array of row ids for a DynamicTable."""
    num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Generator placeholder: abstract type, no slots.
    None


class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # Generator placeholder: abstract type, no slots.
    None


class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    # Pixel data lives in the nested shape/array wrapper.
    array: Optional[ImageArray] = Field(None)


class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    array: Optional[ImageReferencesArray] = Field(None)


class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsStartTime(VectorData):
"""
Start time of epoch, in seconds.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsStopTime(VectorData):
"""
Stop time of epoch, in seconds.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsTags(VectorData):
"""
User-defined tags that identify or categorize events.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData):
"""
An index into a TimeSeries object.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class TimeIntervalsTagsIndex(VectorIndex):
"""
Index for tags.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Generator placeholder: abstract type, no slots.
    None


class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Generator placeholder: abstract type, no slots.
    None


class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # Generator placeholder: abstract type, no slots.
    None
class TimeSeries(NWBDataInterface):
"""
General purpose time series.
"""
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
"""
A collection of processed data.
"""
description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
"""
A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
"""
description: Optional[str] = Field(None, description="""Description of this collection of images.""")
Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class TimeIntervals(DynamicTable):
"""
A container for aggregating epoch data and the TimeSeries that each epoch applies to.
"""
start_time: TimeIntervalsStartTime = Field(..., description="""Start time of epoch, in seconds.""")
stop_time: TimeIntervalsStopTime = Field(..., description="""Stop time of epoch, in seconds.""")
tags: Optional[TimeIntervalsTags] = Field(None, description="""User-defined tags that identify or categorize events.""")
tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Required because `from __future__ import annotations` makes every field
# annotation a string; pydantic v1 must resolve them once all models exist.
Arraylike.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeIntervalsStartTime.update_forward_refs()
TimeIntervalsStopTime.update_forward_refs()
TimeIntervalsTags.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
TimeIntervalsTimeseries.update_forward_refs()
VectorIndex.update_forward_refs()
TimeIntervalsTagsIndex.update_forward_refs()
TimeIntervalsTimeseriesIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
TimeIntervals.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,656 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys

# `Literal` moved into `typing` in Python 3.8; fall back to
# typing_extensions on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

# Versions stamped by the generator; the string "None" means unversioned.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim base adding weak-reference support to pydantic v1 models.

    pydantic v1 models use __slots__, which omits __weakref__; re-declaring
    it here lets instances be targets of weakref (e.g. for caches).
    """
    __slots__ = '__weakref__'
# Common base for all generated models. Config is passed via pydantic v1
# class keyword arguments (equivalent to an inner `class Config`).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,        # re-validate on attribute assignment
                validate_all = True,               # also validate fields left at defaults
                underscore_attrs_are_private = True,
                extra = 'forbid',                  # reject fields not declared in the schema
                arbitrary_types_allowed = True,
                use_enum_values = True):           # store enum members as their values
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtypes permitted by the NWB/HDMF schema language.

    Member names intentionally mirror the schema's dtype spellings, including
    aliases (float/float32, long/int64, ...); several shadow Python builtins
    (float, int, bool, ascii) but only within this enum's namespace.
    """
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class ImageSeriesData(ConfiguredBaseModel):
    """
    Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[ImageSeriesDataArray] = Field(None)
class ImageSeriesDimension(ConfiguredBaseModel):
    """
    Number of pixels on x, y, (and z) axes.
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[ImageSeriesDimensionArray] = Field(None)
class ImageSeriesExternalFile(ConfiguredBaseModel):
    """
    Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
    """
    # NOTE(review): the description says this attribute holds a *list* of frame
    # numbers (one per external file), but the generated annotation is a scalar
    # int — presumably a generator limitation; confirm against the NWB schema.
    starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""")
    array: Optional[ImageSeriesExternalFileArray] = Field(None)
class ImageSeriesFormat(ConfiguredBaseModel):
    """
    Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class OpticalSeriesDistance(ConfiguredBaseModel):
    """
    Distance from camera/monitor to target/eye.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class OpticalSeriesFieldOfView(ConfiguredBaseModel):
    """
    Width, height and depth of image, or imaged area, in meters.
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[OpticalSeriesFieldOfViewArray] = Field(None)
class OpticalSeriesData(ConfiguredBaseModel):
    """
    Images presented to subject, either grayscale or RGB
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[OpticalSeriesDataArray] = Field(None)
class OpticalSeriesOrientation(ConfiguredBaseModel):
    """
    Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class IndexSeriesData(ConfiguredBaseModel):
    """
    Index of the image (using zero-indexing) in the linked Images object.
    """
    # Scalar HDF5 attributes lifted onto the model; values in `array`.
    conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""")
    array: Optional[IndexSeriesDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class GrayscaleImageArray(Arraylike):
    # Dimension slots: Field(...) marks a required dim, Field(None) optional.
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
class RGBImageArray(Arraylike):
    # Dimension slots: Field(...) marks a required dim, Field(None) optional.
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
    r_g_b: Optional[float] = Field(None)
class RGBAImageArray(Arraylike):
    # Dimension slots: Field(...) marks a required dim, Field(None) optional.
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageSeriesDataArray(Arraylike):
    # Dimension slots: Field(...) marks a required dim, Field(None) optional.
    frame: float = Field(...)
    x: float = Field(...)
    y: float = Field(...)
    z: Optional[float] = Field(None)
class ImageSeriesDimensionArray(Arraylike):
    # Single required dimension.
    rank: int = Field(...)
class ImageSeriesExternalFileArray(Arraylike):
    # Single required dimension; elements are file-path strings.
    num_files: str = Field(...)
class OpticalSeriesFieldOfViewArray(Arraylike):
    # Alternative shapes from the schema: 2-vector or 3-vector.
    width_height: Optional[float] = Field(None)
    width_height_depth: Optional[float] = Field(None)
class OpticalSeriesDataArray(Arraylike):
    # Dimension slots: Field(...) marks a required dim, Field(None) optional.
    frame: float = Field(...)
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
class IndexSeriesDataArray(Arraylike):
    # Single required dimension.
    num_times: int = Field(...)
class ImageArray(Arraylike):
    # Dimension slots: the optional third axis is either RGB or RGBA.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
    # Elements are object references to Image, not scalars.
    num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    # Scalar HDF5 attributes lifted onto the model; bulk values in `array`.
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
    # First (time) dimension required; up to three further unnamed dims.
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    # Scalar attributes lifted onto the model; values in `array`.
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
    # Single required dimension.
    num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
    # Single required dimension.
    num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    # Bulk values live in the nested arraylike container.
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
    # Single required dimension; elements are description strings.
    num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class VectorDataArray(Arraylike):
    # Up to 4 dims; only the first is required.
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
    # Single required dimension.
    num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
    # Single required dimension.
    num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
    # Single required dimension.
    num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
    # Single required dimension.
    num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    array: Optional[ImageArray] = Field(None)
class GrayscaleImage(Image):
    """
    A grayscale image.
    """
    # Narrows the inherited `array` to the 2-D grayscale shape.
    array: Optional[GrayscaleImageArray] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBImage(Image):
    """
    A color image.
    """
    # Narrows the inherited `array` to the (x, y, rgb) shape.
    array: Optional[RGBImageArray] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBAImage(Image):
    """
    A color image with transparency.
    """
    # Narrows the inherited `array` to the (x, y, rgba) shape.
    array: Optional[RGBAImageArray] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    # `table` is a forward reference; resolved by update_forward_refs() at import time.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # `None` is the generator's no-op placeholder for an empty class body.
    None
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # `data` is the only required field on the base type.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ImageSeries(TimeSeries):
    """
    General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z].
    """
    # Overrides the inherited `data` with the image-specific dataset type;
    # remaining inherited fields are re-declared by the generator.
    data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ImageMaskSeries(ImageSeries):
    """
    An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed.
    """
    # Inherited fields re-declared verbatim by the generator.
    data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class OpticalSeries(ImageSeries):
    """
    Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important.
    """
    # New fields first, then inherited fields re-declared verbatim by the generator.
    distance: Optional[OpticalSeriesDistance] = Field(None, description="""Distance from camera/monitor to target/eye.""")
    field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
    data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""")
    orientation: Optional[OpticalSeriesOrientation] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[ImageSeriesFormat] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IndexSeries(TimeSeries):
    """
    Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
    """
    # Overrides `data` with the index-specific dataset type; remaining
    # inherited fields re-declared verbatim by the generator.
    data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    # Free-text summary of what this module holds.
    description: Optional[str] = Field(default=None, description="""Description of this collection of processed data.""")
    # Contained data interfaces; empty list when none are present.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    # Contained tables; empty list when none are present.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    # Free-text summary of the image collection.
    description: Optional[str] = Field(default=None, description="""Description of this collection of images.""")
    # Required member images; an empty list is the default.
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    # Optional explicit ordering over the member images.
    order_of_images: Optional[ImagesOrderOfImages] = Field(default=None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # Column ordering and table metadata.
    colnames: Optional[str] = Field(default=None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(default=None, description="""Description of what is in this dynamic table.""")
    # Row identifiers are the only required column.
    id: DynamicTableId = Field(default=..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # Arbitrary user-defined columns.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    # Ordering of the category sub-tables.
    categories: Optional[str] = Field(default=None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # One sub-table per category, all row-aligned with this table.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    # Fields below mirror the DynamicTable contract.
    colnames: Optional[str] = Field(default=None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(default=None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(default=..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Held Data objects; defaults to an empty list.
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    # Held Container objects; defaults to an empty list.
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations now that every model class
# exists; iterating a tuple keeps the original one-call-per-class order.
for _model in (
    ImageSeriesData,
    ImageSeriesDimension,
    ImageSeriesExternalFile,
    ImageSeriesFormat,
    OpticalSeriesDistance,
    OpticalSeriesFieldOfView,
    OpticalSeriesData,
    OpticalSeriesOrientation,
    IndexSeriesData,
    Arraylike,
    GrayscaleImageArray,
    RGBImageArray,
    RGBAImageArray,
    ImageSeriesDataArray,
    ImageSeriesDimensionArray,
    ImageSeriesExternalFileArray,
    OpticalSeriesFieldOfViewArray,
    OpticalSeriesDataArray,
    IndexSeriesDataArray,
    ImageArray,
    ImageReferencesArray,
    TimeSeriesData,
    TimeSeriesDataArray,
    TimeSeriesStartingTime,
    TimeSeriesTimestamps,
    TimeSeriesTimestampsArray,
    TimeSeriesControl,
    TimeSeriesControlArray,
    TimeSeriesControlDescription,
    TimeSeriesControlDescriptionArray,
    TimeSeriesSync,
    VectorDataArray,
    VectorIndexArray,
    ElementIdentifiersArray,
    DynamicTableRegionArray,
    DynamicTableIdArray,
    Data,
    NWBData,
    Image,
    GrayscaleImage,
    RGBImage,
    RGBAImage,
    ImageReferences,
    ImagesOrderOfImages,
    VectorData,
    TimeSeriesReferenceVectorData,
    VectorIndex,
    ElementIdentifiers,
    DynamicTableRegion,
    DynamicTableId,
    Container,
    NWBContainer,
    NWBDataInterface,
    TimeSeries,
    ImageSeries,
    ImageMaskSeries,
    OpticalSeries,
    IndexSeries,
    ProcessingModule,
    Images,
    DynamicTable,
    AlignedDynamicTable,
    SimpleMultiContainer,
):
    _model.update_forward_refs()

View file

@ -1,831 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Pydantic v1 models use __slots__, which drops __weakref__; declaring a
    # single __weakref__ slot here restores weak-reference support for all
    # generated subclasses. A one-element tuple is equivalent to the bare
    # string form of __slots__.
    __slots__ = ('__weakref__',)
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base class for every generated model. The keyword arguments in
    # the class header are pydantic v1 model-config options applied via the
    # metaclass: re-validate on attribute assignment, validate default values,
    # treat underscore attributes as private, reject unknown fields, permit
    # arbitrary (non-pydantic) field types, and store enum members by value.
    pass
class FlatDType(str, Enum):
    # Flat (scalar) dtype names permitted by the NWB schema language.
    # Member values are the literal dtype strings; the str mixin lets members
    # compare equal to the plain strings. Several spellings are synonyms
    # (e.g. float/float32, long/int64, utf/utf8/utf_8).
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class AbstractFeatureSeriesData(ConfiguredBaseModel):
    """
    Values of each feature at each time.
    """
    # Unit attribute plus the un-nested array payload.
    unit: Optional[str] = Field(default=None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""")
    array: Optional[AbstractFeatureSeriesDataArray] = Field(default=None)
class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel):
    """
    Units of each feature.
    """
    # Only the array payload; no scalar attributes.
    array: Optional[AbstractFeatureSeriesFeatureUnitsArray] = Field(default=None)
class AbstractFeatureSeriesFeatures(ConfiguredBaseModel):
    """
    Description of the features represented in TimeSeries::data.
    """
    # Only the array payload; no scalar attributes.
    array: Optional[AbstractFeatureSeriesFeaturesArray] = Field(default=None)
class AnnotationSeriesData(ConfiguredBaseModel):
    """
    Annotations made during an experiment.
    """
    # Fixed-value attributes per the schema text, plus the array payload.
    resolution: Optional[float] = Field(default=None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
    unit: Optional[str] = Field(default=None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
    array: Optional[AnnotationSeriesDataArray] = Field(default=None)
class IntervalSeriesData(ConfiguredBaseModel):
    """
    Use values >0 if interval started, <0 if interval ended.
    """
    # Fixed-value attributes per the schema text, plus the array payload.
    resolution: Optional[float] = Field(default=None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
    unit: Optional[str] = Field(default=None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
    array: Optional[IntervalSeriesDataArray] = Field(default=None)
class DecompositionSeriesData(ConfiguredBaseModel):
    """
    Data decomposed into frequency bands.
    """
    # Unit attribute plus the un-nested array payload.
    unit: Optional[str] = Field(default=None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""")
    array: Optional[DecompositionSeriesDataArray] = Field(default=None)
class DecompositionSeriesMetric(ConfiguredBaseModel):
    """
    The metric used, e.g. phase, amplitude, power.
    """
    # Bare `None` expression as the class body (generator placeholder, same
    # effect as `pass`): this model declares no fields of its own.
    None
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Abstract base: subclasses add one field per possible array dimension.
    # Bare `None` expression as the class body (equivalent to `pass`).
    None
class AbstractFeatureSeriesDataArray(Arraylike):
    # Dimensions: num_times is required, num_features is optional.
    num_times: float = Field(default=...)
    num_features: Optional[float] = Field(default=None)
class AbstractFeatureSeriesFeatureUnitsArray(Arraylike):
    # Single required dimension of text values.
    num_features: str = Field(default=...)
class AbstractFeatureSeriesFeaturesArray(Arraylike):
    # Single required dimension of text values.
    num_features: str = Field(default=...)
class AnnotationSeriesDataArray(Arraylike):
    # Single required time dimension.
    num_times: str = Field(default=...)
class IntervalSeriesDataArray(Arraylike):
    # Single required time dimension.
    num_times: int = Field(default=...)
class DecompositionSeriesDataArray(Arraylike):
    # Three optional dimensions: time x channel x band.
    num_times: Optional[float] = Field(default=None)
    num_channels: Optional[float] = Field(default=None)
    num_bands: Optional[float] = Field(default=None)
class DecompositionSeriesBandsBandLimitsArray(Arraylike):
    # Optional band dimension and the (low, high) pair dimension.
    num_bands: Optional[float] = Field(default=None)
    low_high: Optional[float] = Field(default=None)
class DecompositionSeriesBandsBandMeanArray(Arraylike):
    # Single required band dimension.
    num_bands: float = Field(default=...)
class DecompositionSeriesBandsBandStdevArray(Arraylike):
    # Single required band dimension.
    num_bands: float = Field(default=...)
class UnitsObsIntervalsArray(Arraylike):
    """
    Dimension specifier for the Units.obs_intervals dataset
    (num_intervals x start|end).
    """
    num_intervals: Optional[float] = Field(None)
    # BUG FIX: the generator emitted the schema dimension name "start|end"
    # verbatim as a field name; `start|end: ... = ...` is not a valid Python
    # annotation target (the `|` makes it an expression), so this module was
    # unparseable. Use a sanitized identifier and keep the original schema
    # name available via a pydantic field alias.
    start_end: Optional[float] = Field(None, alias="start|end")
class UnitsWaveformMeanArray(Arraylike):
    # Required unit and sample dimensions; electrode dimension is optional.
    num_units: float = Field(default=...)
    num_samples: float = Field(default=...)
    num_electrodes: Optional[float] = Field(default=None)
class UnitsWaveformSdArray(Arraylike):
    # Required unit and sample dimensions; electrode dimension is optional.
    num_units: float = Field(default=...)
    num_samples: float = Field(default=...)
    num_electrodes: Optional[float] = Field(default=None)
class UnitsWaveformsArray(Arraylike):
    # Two optional dimensions: waveform x sample.
    num_waveforms: Optional[float] = Field(default=None)
    num_samples: Optional[float] = Field(default=None)
class VectorDataArray(Arraylike):
    # Up to four anonymous dimensions; only the first is required.
    dim0: Any = Field(default=...)
    dim1: Optional[Any] = Field(default=None)
    dim2: Optional[Any] = Field(default=None)
    dim3: Optional[Any] = Field(default=None)
class VectorIndexArray(Arraylike):
    # Single required row dimension.
    num_rows: int = Field(default=...)
class ElementIdentifiersArray(Arraylike):
    # Single required element dimension.
    num_elements: int = Field(default=...)
class DynamicTableRegionArray(Arraylike):
    # Single required row dimension.
    num_rows: int = Field(default=...)
class DynamicTableIdArray(Arraylike):
    # Single required row dimension.
    num_rows: int = Field(default=...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Abstract root type for datasets; declares no fields of its own.
    # Bare `None` expression as the class body (equivalent to `pass`).
    None
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    # Column description plus the un-nested array payload.
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(default=None)
class DecompositionSeriesBandsBandName(VectorData):
    """
    Name of the band, e.g. theta.
    """
    # Same field contract as VectorData.
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(default=None)
class DecompositionSeriesBandsBandLimits(VectorData):
    """
    Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.
    """
    # Specialized array type; description inherited from VectorData's contract.
    array: Optional[DecompositionSeriesBandsBandLimitsArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandMean(VectorData):
    """
    The mean Gaussian filters, in Hz.
    """
    # Specialized array type; description inherited from VectorData's contract.
    array: Optional[DecompositionSeriesBandsBandMeanArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class DecompositionSeriesBandsBandStdev(VectorData):
    """
    The standard deviation of Gaussian filters, in Hz.
    """
    # Specialized array type; description inherited from VectorData's contract.
    array: Optional[DecompositionSeriesBandsBandStdevArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimes(VectorData):
    """
    Spike times for each unit in seconds.
    """
    # Extra resolution attribute on top of the VectorData contract.
    resolution: Optional[float] = Field(default=None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""")
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(default=None)
class UnitsObsIntervals(VectorData):
    """
    Observation intervals for each unit.
    """
    # Specialized array type; description inherited from VectorData's contract.
    array: Optional[UnitsObsIntervalsArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsElectrodeGroup(VectorData):
    """
    Electrode group that each spike unit came from.
    """
    # Same field contract as VectorData.
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(default=None)
class UnitsWaveformMean(VectorData):
    """
    Spike waveform mean for each spike unit.
    """
    # Acquisition attributes plus the specialized array payload.
    sampling_rate: Optional[float] = Field(default=None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(default=None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    array: Optional[UnitsWaveformMeanArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsWaveformSd(VectorData):
    """
    Spike waveform standard deviation for each spike unit.
    """
    # Acquisition attributes plus the specialized array payload.
    sampling_rate: Optional[float] = Field(default=None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(default=None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    array: Optional[UnitsWaveformSdArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsWaveforms(VectorData):
    """
    Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
    """
    # Acquisition attributes plus the specialized array payload.
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    array: Optional[UnitsWaveformsArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    # Reference to the indexed column, plus the index array itself.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsSpikeTimesIndex(VectorIndex):
    """
    Index into the spike_times dataset.
    """
    # Same field contract as VectorIndex.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsObsIntervalsIndex(VectorIndex):
    """
    Index into the obs_intervals dataset.
    """
    # Same field contract as VectorIndex.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsElectrodesIndex(VectorIndex):
    """
    Index into electrodes.
    """
    # Same field contract as VectorIndex.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndex(VectorIndex):
    """
    Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.
    """
    # Same field contract as VectorIndex.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class UnitsWaveformsIndexIndex(VectorIndex):
    """
    Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.
    """
    # Same field contract as VectorIndex.
    target: Optional[VectorData] = Field(default=None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(default=None)
    description: Optional[str] = Field(default=None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    # Only the array payload.
    array: Optional[ElementIdentifiersArray] = Field(default=None)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    # Referenced table plus the row-index payload.
    table: Optional[DynamicTable] = Field(default=None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(default=None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(default=None)
class DecompositionSeriesSourceChannels(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the channels that this decomposition series was generated from.
    """
    # Same field contract as DynamicTableRegion.
    table: Optional[DynamicTable] = Field(default=None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(default=None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(default=None)
class UnitsElectrodes(DynamicTableRegion):
    """
    Electrode that each spike unit came from, specified using a DynamicTableRegion.
    """
    # Same field contract as DynamicTableRegion.
    table: Optional[DynamicTable] = Field(default=None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(default=None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(default=None)
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    # Only the array payload, with the row-count dimension specifier.
    array: Optional[DynamicTableIdArray] = Field(default=None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Abstract root type for groups; declares no fields of its own.
    # Bare `None` expression as the class body (equivalent to `pass`).
    None
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # Column ordering and table metadata.
    colnames: Optional[str] = Field(default=None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(default=None, description="""Description of what is in this dynamic table.""")
    # Row identifiers are the only required column.
    id: DynamicTableId = Field(default=..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # Arbitrary user-defined columns.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DecompositionSeriesBands(DynamicTable):
    """
    Table for describing the bands that this series was generated from. There should be one row in this table for each band.
    """
    # Required per-band columns.
    band_name: DecompositionSeriesBandsBandName = Field(default=..., description="""Name of the band, e.g. theta.""")
    band_limits: DecompositionSeriesBandsBandLimits = Field(default=..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""")
    band_mean: DecompositionSeriesBandsBandMean = Field(default=..., description="""The mean Gaussian filters, in Hz.""")
    band_stdev: DecompositionSeriesBandsBandStdev = Field(default=..., description="""The standard deviation of Gaussian filters, in Hz.""")
    # Fields below mirror the DynamicTable contract.
    colnames: Optional[str] = Field(default=None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(default=None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(default=..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class Units(DynamicTable):
    """
    Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
    """
    # Optional per-unit columns; each *_index column is the VectorIndex for the
    # ragged column it names.
    spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
    spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
    obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
    obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
    electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
    electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
    electrode_group: Optional[UnitsElectrodeGroup] = Field(None, description="""Electrode group that each spike unit came from.""")
    waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
    waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
    waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
    waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
    waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
    # Fields below mirror the DynamicTable contract.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    # NOTE(review): generated model — the colnames/description/id/VectorData fields
    # repeat the parent DynamicTable's fields verbatim; presumably the generator
    # re-emits inherited slots. Confirm against the generator before deduplicating.
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # Sub-tables; per the class docstring each must have the same number of rows.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Child objects are grouped by type into two lists rather than nested groups.
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # Generator emits a bare ``None`` expression as a placeholder body for
    # classes that add no fields; the docstring alone would also suffice.
    None
class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Un-nested array payload; shape/dtype constraints live on VectorDataArray.
    array: Optional[VectorDataArray] = Field(None)
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    # Pixel data; the allowed dimensionalities are encoded in ImageArray.
    array: Optional[ImageArray] = Field(None)
class ImageArray(Arraylike):
    # Each slot is a possible dimension: x and y are required, and at most one
    # of the optional color dimensions (r_g_b or r_g_b_a) is expected per the
    # Image docstring's (x, y[, color]) shapes.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    # 1-D array of Image references; see ImageReferencesArray.
    array: Optional[ImageReferencesArray] = Field(None)
class ImageReferencesArray(Arraylike):
    # Single required dimension: one Image reference per element.
    num_images: Image = Field(...)
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Placeholder ``None`` body emitted by the generator; adds no fields.
    None
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # Placeholder ``None`` body emitted by the generator; adds no fields.
    None
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    # Base class for all time-series types below; subclasses re-emit these
    # fields and override ``data`` with a more specific type.
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # The only required field: up to 4-D, first dimension is time.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    # Either starting_time+rate or explicit timestamps describes the time axis.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AbstractFeatureSeries(TimeSeries):
    """
    Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
    """
    # Overrides TimeSeries.data with the subclass-specific payload type.
    data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""")
    feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""")
    features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""")
    # Remaining fields are re-emitted TimeSeries base fields (generator output).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AnnotationSeries(TimeSeries):
    """
    Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
    """
    # Overrides TimeSeries.data; the rest are re-emitted base fields.
    data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntervalSeries(TimeSeries):
    """
    Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
    """
    # Overrides TimeSeries.data; the rest are re-emitted base fields.
    data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DecompositionSeries(TimeSeries):
    """
    Spectral analysis of a time series, e.g. of an LFP or a speech signal.
    """
    # Overrides TimeSeries.data and adds metric/source_channels/bands.
    data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""")
    metric: DecompositionSeriesMetric = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
    source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""")
    bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""")
    # Remaining fields are re-emitted TimeSeries base fields (generator output).
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    # Scalar HDF5 attributes un-nested onto the model; the array payload itself
    # lives in the separate ``array`` field (see TimeSeriesDataArray).
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
    # Dimension slots: only the time dimension is required; up to three extra
    # dimensions are optional (1-D through 4-D per the TimeSeriesData docstring).
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    # Fixed to 'seconds' per the schema; kept as a plain str field here.
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    # 1-D float array of timestamps; see TimeSeriesTimestampsArray.
    array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
    # Single required dimension: one float timestamp per sample.
    num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    # 1-D int array of control labels; see TimeSeriesControlArray.
    array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
    # Single required dimension: one integer label per time point.
    num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    # 1-D string array; see TimeSeriesControlDescriptionArray.
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
    # Single required dimension: one description string per control value.
    num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # Placeholder ``None`` body emitted by the generator; the schema defines no
    # fixed fields for this lab-specific group.
    None
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # Child objects grouped by type into lists, mirroring the HDF5 group layout.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    # Required list (default empty) of the images themselves.
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    # Re-emitted from ImageReferences by the generator.
    array: Optional[ImageReferencesArray] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed (string) annotations on every generated model. Iterating a
# tuple preserves the original one-call-per-class order exactly.
_FORWARD_REF_MODELS = (
    AbstractFeatureSeriesData,
    AbstractFeatureSeriesFeatureUnits,
    AbstractFeatureSeriesFeatures,
    AnnotationSeriesData,
    IntervalSeriesData,
    DecompositionSeriesData,
    DecompositionSeriesMetric,
    Arraylike,
    AbstractFeatureSeriesDataArray,
    AbstractFeatureSeriesFeatureUnitsArray,
    AbstractFeatureSeriesFeaturesArray,
    AnnotationSeriesDataArray,
    IntervalSeriesDataArray,
    DecompositionSeriesDataArray,
    DecompositionSeriesBandsBandLimitsArray,
    DecompositionSeriesBandsBandMeanArray,
    DecompositionSeriesBandsBandStdevArray,
    UnitsObsIntervalsArray,
    UnitsWaveformMeanArray,
    UnitsWaveformSdArray,
    UnitsWaveformsArray,
    VectorDataArray,
    VectorIndexArray,
    ElementIdentifiersArray,
    DynamicTableRegionArray,
    DynamicTableIdArray,
    Data,
    VectorData,
    DecompositionSeriesBandsBandName,
    DecompositionSeriesBandsBandLimits,
    DecompositionSeriesBandsBandMean,
    DecompositionSeriesBandsBandStdev,
    UnitsSpikeTimes,
    UnitsObsIntervals,
    UnitsElectrodeGroup,
    UnitsWaveformMean,
    UnitsWaveformSd,
    UnitsWaveforms,
    VectorIndex,
    UnitsSpikeTimesIndex,
    UnitsObsIntervalsIndex,
    UnitsElectrodesIndex,
    UnitsWaveformsIndex,
    UnitsWaveformsIndexIndex,
    ElementIdentifiers,
    DynamicTableRegion,
    DecompositionSeriesSourceChannels,
    UnitsElectrodes,
    DynamicTableId,
    Container,
    DynamicTable,
    DecompositionSeriesBands,
    Units,
    AlignedDynamicTable,
    SimpleMultiContainer,
    NWBData,
    TimeSeriesReferenceVectorData,
    Image,
    ImageArray,
    ImageReferences,
    ImageReferencesArray,
    NWBContainer,
    NWBDataInterface,
    TimeSeries,
    AbstractFeatureSeries,
    AnnotationSeries,
    IntervalSeries,
    DecompositionSeries,
    TimeSeriesData,
    TimeSeriesDataArray,
    TimeSeriesStartingTime,
    TimeSeriesTimestamps,
    TimeSeriesTimestampsArray,
    TimeSeriesControl,
    TimeSeriesControlArray,
    TimeSeriesControlDescription,
    TimeSeriesControlDescriptionArray,
    TimeSeriesSync,
    ProcessingModule,
    Images,
    ImagesOrderOfImages,
)
for _model in _FORWARD_REF_MODELS:
    _model.update_forward_refs()

View file

@ -1,475 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Version markers emitted by the generator; "None" strings indicate the source
# schema declared no versions.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim so model instances support weak references despite pydantic's use of
    # __slots__ (a plain string is treated as a single slot name).
    __slots__ = '__weakref__'
# Shared base with model-wide config passed as class keyword arguments
# (pydantic v1 style): re-validate on assignment and on all fields, keep
# underscore attributes private, reject unknown fields, allow arbitrary
# field types, and store enum values rather than enum members.
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class FlatDType(str, Enum):
    """Flat scalar dtypes from the source schema (str-valued enum).

    Member names intentionally mirror the schema's dtype strings, so several
    shadow builtins (float, int, bool, ascii) — harmless inside the enum
    namespace.
    """
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class OptogeneticSeriesData(ConfiguredBaseModel):
    """
    Applied power for optogenetic stimulus, in watts.
    """
    unit: Optional[str] = Field(None, description="""Unit of measurement for data, which is fixed to 'watts'.""")
    # 1-D float array over time; see OptogeneticSeriesDataArray.
    array: Optional[OptogeneticSeriesDataArray] = Field(None)
class OptogeneticStimulusSiteDescription(ConfiguredBaseModel):
    """
    Description of stimulation site.
    """
    # Placeholder ``None`` body emitted by the generator; adds no fields.
    None
class OptogeneticStimulusSiteExcitationLambda(ConfiguredBaseModel):
    """
    Excitation wavelength, in nm.
    """
    # Placeholder ``None`` body emitted by the generator; adds no fields.
    None
class OptogeneticStimulusSiteLocation(ConfiguredBaseModel):
    """
    Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.
    """
    # Placeholder ``None`` body emitted by the generator; adds no fields.
    None
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Abstract marker base; subclasses declare one field per possible dimension.
    None
class OptogeneticSeriesDataArray(Arraylike):
    # Single required dimension: one float power value per time point.
    num_times: float = Field(...)
class ImageArray(Arraylike):
    # Dimension slots: x and y required; at most one optional color dimension.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageReferencesArray(Arraylike):
    # Single required dimension: one Image reference per element.
    num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    # Scalar attributes un-nested onto the model; array payload is separate.
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    array: Optional[TimeSeriesDataArray] = Field(None)
# NOTE(review): this module appears to be auto-generated from the NWB schema
# by a pydantic generator; manual edits may be overwritten on regeneration.

# Shape specifier for TimeSeries.data. Each slot is one possible dimension;
# only the first (time) dimension is required, matching the 1-D..4-D shape
# variants allowed by the schema.
class TimeSeriesDataArray(Arraylike):
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)


class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")


class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    array: Optional[TimeSeriesTimestampsArray] = Field(None)


# 1-D vector: one float timestamp per sample.
class TimeSeriesTimestampsArray(Arraylike):
    num_times: float = Field(...)


class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    array: Optional[TimeSeriesControlArray] = Field(None)


class TimeSeriesControlArray(Arraylike):
    num_times: int = Field(...)


class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    array: Optional[TimeSeriesControlDescriptionArray] = Field(None)


class TimeSeriesControlDescriptionArray(Arraylike):
    num_control_values: str = Field(...)


class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # The generator emits a bare `None` expression for an empty class body;
    # it is a harmless no-op, equivalent to `pass`.
    None


# Generic shape specifier for VectorData: up to 4 dimensions, only the
# first required.
class VectorDataArray(Arraylike):
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)


class VectorIndexArray(Arraylike):
    num_rows: int = Field(...)


class ElementIdentifiersArray(Arraylike):
    num_elements: int = Field(...)


class DynamicTableRegionArray(Arraylike):
    num_rows: int = Field(...)


class DynamicTableIdArray(Arraylike):
    num_rows: int = Field(...)
# --- hdmf-common data hierarchy: Data -> NWBData -> Image / VectorData etc. ---

class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    None


class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    None


class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    array: Optional[ImageArray] = Field(None)


class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    array: Optional[ImageReferencesArray] = Field(None)


class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    array: Optional[ImageReferencesArray] = Field(None)


class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)


class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)


class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")


class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    array: Optional[ElementIdentifiersArray] = Field(None)


class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)


class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    array: Optional[DynamicTableIdArray] = Field(None)
# --- container hierarchy: Container -> NWBContainer -> NWBDataInterface -> TimeSeries ---

class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    None


class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    None


class OptogeneticStimulusSite(NWBContainer):
    """
    A site of optogenetic stimulation.
    """
    description: OptogeneticStimulusSiteDescription = Field(..., description="""Description of stimulation site.""")
    excitation_lambda: OptogeneticStimulusSiteExcitationLambda = Field(..., description="""Excitation wavelength, in nm.""")
    location: OptogeneticStimulusSiteLocation = Field(..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")


class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    None


class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class OptogeneticSeries(TimeSeries):
    """
    An optogenetic stimulus.
    """
    # `data` narrows the inherited TimeSeriesData to OptogeneticSeriesData;
    # the remaining fields repeat the TimeSeries contract verbatim (the
    # generator flattens inherited slots into each subclass).
    data: OptogeneticSeriesData = Field(..., description="""Applied power for optogenetic stimulus, in watts.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")


class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    # Untyped-name children are collected into lists keyed by their
    # neurodata type (hence the PascalCase field names).
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")


class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")


class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # NOTE(review): `colnames` is declared Optional[str] here although it is a
    # list of names in the hdmf schema -- presumably a generator limitation;
    # confirm before relying on it.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")


class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")


class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Required because `from __future__ import annotations` makes every
# annotation a string; pydantic v1 must resolve them after all models exist.
OptogeneticSeriesData.update_forward_refs()
OptogeneticStimulusSiteDescription.update_forward_refs()
OptogeneticStimulusSiteExcitationLambda.update_forward_refs()
OptogeneticStimulusSiteLocation.update_forward_refs()
Arraylike.update_forward_refs()
OptogeneticSeriesDataArray.update_forward_refs()
ImageArray.update_forward_refs()
ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesTimestampsArray.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlArray.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesControlDescriptionArray.update_forward_refs()
TimeSeriesSync.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
NWBData.update_forward_refs()
Image.update_forward_refs()
ImageReferences.update_forward_refs()
ImagesOrderOfImages.update_forward_refs()
VectorData.update_forward_refs()
TimeSeriesReferenceVectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
NWBContainer.update_forward_refs()
OptogeneticStimulusSite.update_forward_refs()
NWBDataInterface.update_forward_refs()
TimeSeries.update_forward_refs()
OptogeneticSeries.update_forward_refs()
ProcessingModule.update_forward_refs()
Images.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

File diff suppressed because it is too large Load diff

View file

@ -1,568 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Schema/metamodel version stamps emitted by the generator ("None" when the
# source schema declares no version).
metamodel_version = "None"
version = "None"


# pydantic v1 BaseModel defines __slots__ without __weakref__, so instances
# cannot be weak-referenced; this shim restores that ability.
class WeakRefShimBaseModel(BaseModel):
    __slots__ = '__weakref__'


# Shared base for all generated models; config is passed as class keyword
# arguments (supported by pydantic v1's ModelMetaclass).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
# Flat (scalar) dtypes permitted by the NWB specification language; values
# are the literal dtype strings used in the schema.
class FlatDType(str, Enum):
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
# --- ImagingRetinotopy sub-datasets: each models one named dataset of the
# ImagingRetinotopy group, with its attributes lifted into model fields. ---

class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
    """
    Phase response to stimulus on the first measured axis.
    """
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    array: Optional[ImagingRetinotopyAxis1PhaseMapArray] = Field(None)


class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
    """
    Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
    """
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    array: Optional[ImagingRetinotopyAxis1PowerMapArray] = Field(None)


class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
    """
    Phase response to stimulus on the second measured axis.
    """
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    array: Optional[ImagingRetinotopyAxis2PhaseMapArray] = Field(None)


class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
    """
    Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
    """
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    array: Optional[ImagingRetinotopyAxis2PowerMapArray] = Field(None)


class ImagingRetinotopyAxisDescriptions(ConfiguredBaseModel):
    """
    Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].
    """
    array: Optional[ImagingRetinotopyAxisDescriptionsArray] = Field(None)


class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
    """
    Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].
    """
    bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""")
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
    format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
    array: Optional[ImagingRetinotopyFocalDepthImageArray] = Field(None)


class ImagingRetinotopySignMap(ConfiguredBaseModel):
    """
    Sine of the angle between the direction of the gradient in axis_1 and axis_2.
    """
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    array: Optional[ImagingRetinotopySignMapArray] = Field(None)


class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
    """
    Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]
    """
    bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""")
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
    array: Optional[ImagingRetinotopyVasculatureImageArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype
    properties. This is a special case to be interpreted by downstream i/o.
    This class has no slots and is abstract by default.
    - Each slot within a subclass indicates a possible dimension.
    - Only dimensions that are present in all the dimension specifiers in
      the original schema are required.
    - Shape requirements are indicated using max/min cardinalities on the slot.
    """
    None


# Per-dataset shape specifiers: field name encodes the dimension label,
# field type encodes the element dtype; Optional marks dims not present in
# every allowed shape.
class ImagingRetinotopyAxis1PhaseMapArray(Arraylike):
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)


class ImagingRetinotopyAxis1PowerMapArray(Arraylike):
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)


class ImagingRetinotopyAxis2PhaseMapArray(Arraylike):
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)


class ImagingRetinotopyAxis2PowerMapArray(Arraylike):
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)


class ImagingRetinotopyAxisDescriptionsArray(Arraylike):
    axis_1_axis_2: str = Field(...)


class ImagingRetinotopyFocalDepthImageArray(Arraylike):
    num_rows: Optional[int] = Field(None)
    num_cols: Optional[int] = Field(None)


class ImagingRetinotopySignMapArray(Arraylike):
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)


class ImagingRetinotopyVasculatureImageArray(Arraylike):
    num_rows: Optional[int] = Field(None)
    num_cols: Optional[int] = Field(None)


class ImageArray(Arraylike):
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)


# Element type is a reference to Image (object-reference dtype).
class ImageReferencesArray(Arraylike):
    num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
"""
Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
"""
conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
array: Optional[TimeSeriesDataArray] = Field(None)
class TimeSeriesDataArray(Arraylike):
num_times: Any = Field(...)
num_DIM2: Optional[Any] = Field(None)
num_DIM3: Optional[Any] = Field(None)
num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
"""
Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
"""
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
array: Optional[TimeSeriesTimestampsArray] = Field(None)
class TimeSeriesTimestampsArray(Arraylike):
num_times: float = Field(...)
class TimeSeriesControl(ConfiguredBaseModel):
"""
Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
"""
array: Optional[TimeSeriesControlArray] = Field(None)
class TimeSeriesControlArray(Arraylike):
num_times: int = Field(...)
class TimeSeriesControlDescription(ConfiguredBaseModel):
"""
Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
"""
array: Optional[TimeSeriesControlDescriptionArray] = Field(None)
class TimeSeriesControlDescriptionArray(Arraylike):
    # 1-D: one string per distinct control value.
    num_control_values: str = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # Free-form group: the schema defines no fields, so the docstring alone is
    # the class body. (The generator previously emitted a stray bare ``None``
    # expression statement here, which is a no-op and has been removed.)
class VectorDataArray(Arraylike):
    # Shape spec for VectorData: first dimension required, up to three
    # optional trailing dimensions of unconstrained dtype.
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
    # 1-D: one int index per row of the indexed table.
    num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
    # 1-D: one int identifier per element.
    num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
    # 1-D: one int row-reference per row.
    num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
    # 1-D: one int id per table row.
    num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    # Pixel data; shape constrained by ImageArray (x, y[, rgb/rgba]).
    array: Optional[ImageArray] = Field(None)
class ImageReferences(NWBData):
    """
    Ordered dataset of references to Image objects.
    """
    # Ordered references; shape constrained by ImageReferencesArray.
    array: Optional[ImageReferencesArray] = Field(None)
class ImagesOrderOfImages(ImageReferences):
    """
    Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
    """
    # Same shape contract as the parent ImageReferences type.
    array: Optional[ImageReferencesArray] = Field(None)
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Column payload; shape constrained by VectorDataArray.
    array: Optional[VectorDataArray] = Field(None)
class TimeSeriesReferenceVectorData(VectorData):
    """
    Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
    """
    # Re-declares the parent's fields; the compound (start_index, count,
    # timeseries) element type is not expressed in this generated model.
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    # 1-D int indices; shape constrained by VectorIndexArray.
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    # 1-D int ids; shape constrained by ElementIdentifiersArray.
    array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # 1-D int row references; shape constrained by DynamicTableRegionArray.
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    # 1-D int ids; shape constrained by DynamicTableIdArray.
    array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class ImagingRetinotopy(NWBDataInterface):
    """
    Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x).
    """
    # Sub-dataset fields use generated per-dataset model classes; required
    # fields (Field(...)) mirror the required datasets of the schema group.
    axis_1_phase_map: ImagingRetinotopyAxis1PhaseMap = Field(..., description="""Phase response to stimulus on the first measured axis.""")
    axis_1_power_map: Optional[ImagingRetinotopyAxis1PowerMap] = Field(None, description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
    axis_2_phase_map: ImagingRetinotopyAxis2PhaseMap = Field(..., description="""Phase response to stimulus on the second measured axis.""")
    axis_2_power_map: Optional[ImagingRetinotopyAxis2PowerMap] = Field(None, description="""Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
    axis_descriptions: ImagingRetinotopyAxisDescriptions = Field(..., description="""Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].""")
    focal_depth_image: Optional[ImagingRetinotopyFocalDepthImage] = Field(None, description="""Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].""")
    sign_map: Optional[ImagingRetinotopySignMap] = Field(None, description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""")
    vasculature_image: ImagingRetinotopyVasculatureImage = Field(..., description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""")
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # `data` is the only required sub-dataset of a TimeSeries.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    # Either starting_time (+rate) or explicit timestamps locates samples in time.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class ProcessingModule(NWBContainer):
    """
    A collection of processed data.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
    # NOTE(review): these fields are named after their element types; the
    # annotations still resolve to the classes because evaluation is deferred
    # until update_forward_refs() runs at module scope.
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
class Images(NWBDataInterface):
    """
    A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
    """
    description: Optional[str] = Field(None, description="""Description of this collection of images.""")
    # Field named after its element type (Image), matching the generator's
    # convention for untyped-name group members.
    Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
    order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # NOTE(review): colnames is typed as a single str here although its
    # description implies a list of names — likely a generator limitation;
    # confirm against the schema before relying on it.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    # The remaining fields re-declare the inherited DynamicTable fields.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Fields named after their element types (Data, Container).
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations now that every model in this
# module has been defined; equivalent to one update_forward_refs() call per
# class, in definition order.
_FORWARD_REF_MODELS = (
    ImagingRetinotopyAxis1PhaseMap,
    ImagingRetinotopyAxis1PowerMap,
    ImagingRetinotopyAxis2PhaseMap,
    ImagingRetinotopyAxis2PowerMap,
    ImagingRetinotopyAxisDescriptions,
    ImagingRetinotopyFocalDepthImage,
    ImagingRetinotopySignMap,
    ImagingRetinotopyVasculatureImage,
    Arraylike,
    ImagingRetinotopyAxis1PhaseMapArray,
    ImagingRetinotopyAxis1PowerMapArray,
    ImagingRetinotopyAxis2PhaseMapArray,
    ImagingRetinotopyAxis2PowerMapArray,
    ImagingRetinotopyAxisDescriptionsArray,
    ImagingRetinotopyFocalDepthImageArray,
    ImagingRetinotopySignMapArray,
    ImagingRetinotopyVasculatureImageArray,
    ImageArray,
    ImageReferencesArray,
    TimeSeriesData,
    TimeSeriesDataArray,
    TimeSeriesStartingTime,
    TimeSeriesTimestamps,
    TimeSeriesTimestampsArray,
    TimeSeriesControl,
    TimeSeriesControlArray,
    TimeSeriesControlDescription,
    TimeSeriesControlDescriptionArray,
    TimeSeriesSync,
    VectorDataArray,
    VectorIndexArray,
    ElementIdentifiersArray,
    DynamicTableRegionArray,
    DynamicTableIdArray,
    Data,
    NWBData,
    Image,
    ImageReferences,
    ImagesOrderOfImages,
    VectorData,
    TimeSeriesReferenceVectorData,
    VectorIndex,
    ElementIdentifiers,
    DynamicTableRegion,
    DynamicTableId,
    Container,
    NWBContainer,
    NWBDataInterface,
    ImagingRetinotopy,
    TimeSeries,
    ProcessingModule,
    Images,
    DynamicTable,
    AlignedDynamicTable,
    SimpleMultiContainer,
)
for _model in _FORWARD_REF_MODELS:
    _model.update_forward_refs()
del _model, _FORWARD_REF_MODELS

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,141 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_base_include import (
TimeSeriesSync,
TimeSeriesControlDescription,
# ImageReferencesArray,
ImageArray,
TimeSeriesControl,
TimeSeriesData,
# ImagesOrderOfImages,
TimeSeriesTimestamps,
TimeSeriesStartingTime
)
from .hdmf_common_base import (
Container,
Data
)
# from .hdmf_common_table import (
# DynamicTable,
# VectorData
# )
# Version stamps emitted by the generator; "None" means the source schema
# declared no version for this build.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim adding a '__weakref__' slot so model instances can be weak-referenced
    # (a bare string in __slots__ declares that single slot name).
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                # Pydantic config passed as class keyword arguments: validate on
                # assignment and on all fields, treat _underscore attrs as
                # private, forbid unknown fields, allow arbitrary (non-pydantic)
                # field types, and store enum values rather than Enum members.
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class NWBData(Data):
    """
    An abstract data type for a dataset.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
# class TimeSeriesReferenceVectorData(VectorData):
# """
# Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
# """
# description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
# array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
#
class Image(NWBData):
    """
    An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).
    """
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
    # Un-nested array typed directly with nptyping: x, y required; 3 (rgb) or
    # 4 (rgba) channel dimension optional per the shape expression.
    array: Optional[NDArray[Shape["* x, * y, 3 r_g_b, 4 r_g_b_a"], Number]] = Field(None)
# class ImageReferences(NWBData):
# """
# Ordered dataset of references to Image objects.
# """
# array: Optional[List[Image] | Image] = Field(None)
#
class NWBContainer(Container):
    """
    An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class NWBDataInterface(NWBContainer):
    """
    An abstract data type for a generic container storing collections of data, as opposed to metadata.
    """
    # Abstract base with no fields of its own; the docstring alone is the
    # class body (removed the generator's stray no-op ``None`` expression).
class TimeSeries(NWBDataInterface):
    """
    General purpose time series.
    """
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # Sub-dataset models are imported from the *_include sibling module.
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
# class ProcessingModule(NWBContainer):
# """
# A collection of processed data.
# """
# description: Optional[str] = Field(None, description="""Description of this collection of processed data.""")
# NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Data objects stored in this collection.""")
# DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tables stored in this collection.""")
#
# class Images(NWBDataInterface):
# """
# A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.
# """
# description: Optional[str] = Field(None, description="""Description of this collection of images.""")
# Image: List[Image] = Field(default_factory=list, description="""Images stored in this collection.""")
# order_of_images: Optional[ImagesOrderOfImages] = Field(None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""")
#
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed annotations for the models currently enabled in this
# module; the commented-out classes (TimeSeriesReferenceVectorData,
# ImageReferences, ProcessingModule, Images) stay disabled until their
# cross-file imports are restored.
for _model in (NWBData, Image, NWBContainer, NWBDataInterface, TimeSeries):
    _model.update_forward_refs()
del _model

View file

@ -0,0 +1,130 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .nwb_language import (
Arraylike
)
# from .core_nwb_base import (
# ImageReferences,
# Image
# )
# Version stamps emitted by the generator; "None" means the source schema
# declared no version for this build.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim adding a '__weakref__' slot so model instances can be weak-referenced
    # (a bare string in __slots__ declares that single slot name).
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                # Pydantic config passed as class keyword arguments: validate on
                # assignment and on all fields, treat _underscore attrs as
                # private, forbid unknown fields, allow arbitrary (non-pydantic)
                # field types, and store enum values rather than Enum members.
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class ImageArray(Arraylike):
    # Shape spec for Image.array: x and y required; optional 3rd dimension of
    # rgb or rgba channels.
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
# class ImageReferencesArray(Arraylike):
#
# num_images: Image = Field(...)
class TimeSeriesData(ConfiguredBaseModel):
    """
    Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
    """
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""")
    offset: Optional[float] = Field(None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""")
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    continuity: Optional[str] = Field(None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""")
    # nptyping hint: first (time) dimension required, further dimensions open.
    array: Optional[NDArray[Shape["* num_times, ..."], Number]] = Field(None)
class TimeSeriesDataArray(Arraylike):
    # Shape spec for TimeSeries.data: the first (time) dimension is required;
    # up to three optional trailing dimensions of unconstrained dtype.
    num_times: Any = Field(...)
    num_DIM2: Optional[Any] = Field(None)
    num_DIM3: Optional[Any] = Field(None)
    num_DIM4: Optional[Any] = Field(None)
class TimeSeriesStartingTime(ConfiguredBaseModel):
    """
    Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
    """
    # Attributes attached to the scalar starting_time dataset.
    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for time, which is fixed to 'seconds'.""")
class TimeSeriesTimestamps(ConfiguredBaseModel):
    """
    Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
    """
    interval: Optional[int] = Field(None, description="""Value is '1'""")
    unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
    # 1-D vector data un-nested into a plain list (new cleaner codegen form).
    timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
class TimeSeriesControl(ConfiguredBaseModel):
    """
    Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
    """
    # 1-D vector data un-nested into a plain list (new cleaner codegen form).
    control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
class TimeSeriesControlDescription(ConfiguredBaseModel):
    """
    Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
    """
    # 1-D vector data un-nested into a plain list (new cleaner codegen form).
    control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
class TimeSeriesSync(ConfiguredBaseModel):
    """
    Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.
    """
    # The schema defines 'sync' as an open group with no fixed contents, so
    # this model declares no fields. Use the idiomatic `pass` placeholder
    # instead of the generator's bare `None` expression statement.
    pass
# class ImagesOrderOfImages(ImageReferences):
# """
# Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.
# """
# array: Optional[List[Image] | Image] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# With `from __future__ import annotations` in effect, every annotation is a
# string, so each model must resolve its forward references before first use.
# Calls for models that are commented out above stay commented out here too.
ImageArray.update_forward_refs()
# ImageReferencesArray.update_forward_refs()
TimeSeriesData.update_forward_refs()
TimeSeriesDataArray.update_forward_refs()
TimeSeriesStartingTime.update_forward_refs()
TimeSeriesTimestamps.update_forward_refs()
TimeSeriesControl.update_forward_refs()
TimeSeriesControlDescription.update_forward_refs()
TimeSeriesSync.update_forward_refs()
# ImagesOrderOfImages.update_forward_refs()

View file

@ -0,0 +1,118 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_behavior_include import (
SpatialSeriesData
)
from .core_nwb_base import (
TimeSeries,
NWBDataInterface
)
from .core_nwb_misc import (
IntervalSeries
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
class SpatialSeries(TimeSeries):
    """
    Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.
    """
    # 'data' narrows the inherited TimeSeries data field to SpatialSeriesData.
    data: SpatialSeriesData = Field(..., description="""1-D or 2-D array storing position or direction relative to some reference frame.""")
    reference_frame: Optional[str] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # NOTE(review): TimeSeriesStartingTime / TimeSeriesTimestamps /
    # TimeSeriesControl / TimeSeriesControlDescription / TimeSeriesSync are
    # referenced below but do not appear in this module's visible import
    # block — confirm they resolve when update_forward_refs() runs.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class BehavioralEpochs(NWBDataInterface):
    """
    TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data.
    """
    # Container group: zero or more IntervalSeries children, un-nested into a
    # list. The field name mirrors (and intentionally shadows) the child type.
    IntervalSeries: Optional[List[IntervalSeries]] = Field(default_factory=list, description="""IntervalSeries object containing start and stop times of epochs.""")
class BehavioralEvents(NWBDataInterface):
    """
    TimeSeries for storing behavioral events. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
    """
    # Container group: zero or more TimeSeries children, un-nested into a
    # list. The field name mirrors (and intentionally shadows) the child type.
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing behavioral events.""")
class BehavioralTimeSeries(NWBDataInterface):
    """
    TimeSeries for storing Behavoioral time series data. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.
    """
    # Container group: zero or more TimeSeries children, un-nested into a list.
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries object containing continuous behavioral data.""")
class PupilTracking(NWBDataInterface):
    """
    Eye-tracking data, representing pupil size.
    """
    # Required container: annotated without Optional, but default_factory
    # still yields an empty list when no TimeSeries children are supplied.
    TimeSeries: List[TimeSeries] = Field(default_factory=list, description="""TimeSeries object containing time series data on pupil size.""")
class EyeTracking(NWBDataInterface):
    """
    Eye-tracking data, representing direction of gaze.
    """
    # Container group: zero or more SpatialSeries children, un-nested into a list.
    SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing data measuring direction of gaze.""")
class CompassDirection(NWBDataInterface):
    """
    With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees.
    """
    # Container group: zero or more SpatialSeries children, un-nested into a list.
    SpatialSeries: Optional[List[SpatialSeries]] = Field(default_factory=list, description="""SpatialSeries object containing direction of gaze travel.""")
class Position(NWBDataInterface):
    """
    Position data, whether along the x, x/y or x/y/z axis.
    """
    # Required container: annotated without Optional, but default_factory
    # still yields an empty list when no SpatialSeries children are supplied.
    SpatialSeries: List[SpatialSeries] = Field(default_factory=list, description="""SpatialSeries object containing position data.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    SpatialSeries,
    BehavioralEpochs,
    BehavioralEvents,
    BehavioralTimeSeries,
    PupilTracking,
    EyeTracking,
    CompassDirection,
    Position,
):
    _model.update_forward_refs()

View file

@ -0,0 +1,55 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .nwb_language import (
Arraylike
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
class SpatialSeriesData(ConfiguredBaseModel):
    """
    1-D or 2-D array storing position or direction relative to some reference frame.
    """
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    # NOTE(review): the Shape expression lists "1 x, 2 x_y, 3 x_y_z" as three
    # additional dimensions, which nptyping reads as a 4-D constraint rather
    # than alternative second-dimension widths (1, 2 or 3) — confirm against
    # the generator's arraylike handling.
    array: Optional[NDArray[Shape["* num_times, 1 x, 2 x_y, 3 x_y_z"], Number]] = Field(None)
class SpatialSeriesDataArray(Arraylike):
    # Dimension descriptors for SpatialSeriesData: one required time axis
    # plus optional width axes for the 1-D/2-D/3-D spatial variants.
    num_times: float = Field(...)
    x: Optional[float] = Field(None)
    xy: Optional[float] = Field(None)
    xyz: Optional[float] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed annotations (from __future__ import annotations).
SpatialSeriesData.update_forward_refs()
SpatialSeriesDataArray.update_forward_refs()

View file

@ -0,0 +1,46 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_base import (
NWBContainer
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
class Device(NWBContainer):
    """
    Metadata about a data acquisition device, e.g., recording system, electrode, microscope.
    """
    # Scalar attributes un-nested into plain optional string fields.
    description: Optional[str] = Field(None, description="""Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.""")
    manufacturer: Optional[str] = Field(None, description="""The name of the manufacturer of the device.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed annotations (from __future__ import annotations).
Device.update_forward_refs()

View file

@ -0,0 +1,33 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/

View file

@ -0,0 +1,170 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_ecephys_include import (
ClusteringPeakOverRms,
FeatureExtractionDescription,
FeatureExtractionFeatures,
EventDetectionSourceIdx,
ClusteringTimes,
SpikeEventSeriesTimestamps,
EventDetectionTimes,
ClusterWaveformsWaveformMean,
SpikeEventSeriesData,
FeatureExtractionTimes,
ClusteringNum,
ElectricalSeriesChannelConversion,
ClusterWaveformsWaveformSd,
ElectricalSeriesData,
ElectricalSeriesElectrodes,
FeatureExtractionElectrodes
)
from .core_nwb_base import (
TimeSeries,
NWBContainer,
NWBDataInterface
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
class ElectricalSeries(TimeSeries):
    """
    A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels.
    """
    filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
    # 'data' and 'electrodes' narrow inherited/required fields to the
    # ecephys-specific nested-dataset models from the *_include module.
    data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
    electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
    channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # NOTE(review): TimeSeriesStartingTime / TimeSeriesTimestamps /
    # TimeSeriesControl / TimeSeriesControlDescription / TimeSeriesSync are
    # referenced below but do not appear in this module's visible import
    # block — confirm they resolve when update_forward_refs() runs.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class SpikeEventSeries(ElectricalSeries):
    """
    Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
    """
    # 'data' and 'timestamps' are narrowed and made required relative to the
    # ElectricalSeries parent (timestamps are mandatory for spike events).
    data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
    timestamps: SpikeEventSeriesTimestamps = Field(..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""")
    filtering: Optional[str] = Field(None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""")
    electrodes: ElectricalSeriesElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
    channel_conversion: Optional[ElectricalSeriesChannelConversion] = Field(None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    # NOTE(review): TimeSeriesStartingTime / TimeSeriesControl /
    # TimeSeriesControlDescription / TimeSeriesSync are referenced below but
    # do not appear in this module's visible import block — confirm they
    # resolve when update_forward_refs() runs.
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class FeatureExtraction(NWBDataInterface):
    """
    Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
    """
    # All four datasets are required; each is a nested-dataset model from the
    # *_include module.
    description: FeatureExtractionDescription = Field(..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""")
    features: FeatureExtractionFeatures = Field(..., description="""Multi-dimensional array of features extracted from each event.""")
    times: FeatureExtractionTimes = Field(..., description="""Times of events that features correspond to (can be a link).""")
    electrodes: FeatureExtractionElectrodes = Field(..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""")
class EventDetection(NWBDataInterface):
    """
    Detected spike events from voltage trace(s).
    """
    # detection_method is a required plain string; source_idx/times are
    # required nested-dataset models from the *_include module.
    detection_method: str = Field(..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""")
    source_idx: EventDetectionSourceIdx = Field(..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""")
    times: EventDetectionTimes = Field(..., description="""Timestamps of events, in seconds.""")
class EventWaveform(NWBDataInterface):
    """
    Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition.
    """
    # Container group: zero or more SpikeEventSeries children, un-nested into
    # a list. The field name mirrors (and intentionally shadows) the child type.
    SpikeEventSeries: Optional[List[SpikeEventSeries]] = Field(default_factory=list, description="""SpikeEventSeries object(s) containing detected spike event waveforms.""")
class FilteredEphys(NWBDataInterface):
    """
    Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
    """
    # Required container: annotated without Optional, but default_factory
    # still yields an empty list when no children are supplied.
    ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing filtered electrophysiology data.""")
class LFP(NWBDataInterface):
    """
    LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
    """
    # Required container: annotated without Optional, but default_factory
    # still yields an empty list when no children are supplied.
    ElectricalSeries: List[ElectricalSeries] = Field(default_factory=list, description="""ElectricalSeries object(s) containing LFP data for one or more channels.""")
class ElectrodeGroup(NWBContainer):
    """
    A physical grouping of electrodes, e.g. a shank of an array.
    """
    description: Optional[str] = Field(None, description="""Description of this electrode group.""")
    location: Optional[str] = Field(None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""")
    # 'position' is typed Any because the schema's compound (x, y, z) dtype
    # has no generated model here.
    position: Optional[Any] = Field(None, description="""stereotaxic or common framework coordinates""")
class ClusterWaveforms(NWBDataInterface):
    """
    DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
    """
    # Deprecated in the NWB schema (see class docstring); kept for
    # round-tripping existing files.
    waveform_filtering: str = Field(..., description="""Filtering applied to data before generating mean/sd""")
    waveform_mean: ClusterWaveformsWaveformMean = Field(..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""")
    waveform_sd: ClusterWaveformsWaveformSd = Field(..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""")
class Clustering(NWBDataInterface):
    """
    DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting.
    """
    # Deprecated in the NWB schema (see class docstring); kept for
    # round-tripping existing files.
    description: str = Field(..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""")
    num: ClusteringNum = Field(..., description="""Cluster number of each event""")
    peak_over_rms: ClusteringPeakOverRms = Field(..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""")
    times: ClusteringTimes = Field(..., description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    ElectricalSeries,
    SpikeEventSeries,
    FeatureExtraction,
    EventDetection,
    EventWaveform,
    FilteredEphys,
    LFP,
    ElectrodeGroup,
    ClusterWaveforms,
    Clustering,
):
    _model.update_forward_refs()

View file

@ -0,0 +1,217 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTableRegion
)
from .nwb_language import (
Arraylike
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim that restores weak-referenceability: pydantic v1's BaseModel uses
    # __slots__ without a '__weakref__' slot, so weakref.ref() on a plain
    # subclass fails; declaring the slot here re-enables it for all models.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models, configured via pydantic v1 class
    # keyword arguments: re-validate on assignment, validate defaults, treat
    # underscored attributes as private, reject unknown fields, and allow
    # arbitrary (e.g. nptyping NDArray) field types.
    pass
class ElectricalSeriesData(ConfiguredBaseModel):
    """
    Recorded voltage data.
    """
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""")
    # nptyping constraint: numeric array with up to three unbounded axes
    # (time, channels, samples).
    array: Optional[NDArray[Shape["* num_times, * num_channels, * num_samples"], Number]] = Field(None)
class ElectricalSeriesDataArray(Arraylike):
    # Dimension descriptors for ElectricalSeriesData: time is required,
    # channel and sample axes are optional (1-D, 2-D or 3-D data).
    num_times: float = Field(...)
    num_channels: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)
class ElectricalSeriesElectrodes(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the electrodes that this time series was generated from.
    """
    # NOTE(review): DynamicTable is referenced here but only DynamicTableRegion
    # is imported in this module's visible import block — confirm the forward
    # reference resolves when update_forward_refs() runs.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: the generator emitted `NDArray[Shape[...], ]` with the dtype
    # argument missing (a one-element tuple subscript), which nptyping's
    # NDArray rejects at class-creation time. The region's element type is
    # unspecified in the schema, so use Any until the generator emits a
    # concrete dtype.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class ElectricalSeriesChannelConversion(ConfiguredBaseModel):
    """
    Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.
    """
    # 'axis' is a fixed-value attribute (always 1, the channel axis of 'data').
    axis: Optional[int] = Field(None, description="""The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1.""")
    # 1-D vector dataset un-nested into a plain list of floats, one per channel.
    channel_conversion: Optional[List[float]] = Field(default_factory=list, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""")
class SpikeEventSeriesData(ConfiguredBaseModel):
    """
    Spike waveforms.
    """
    unit: Optional[str] = Field(None, description="""Unit of measurement for waveforms, which is fixed to 'volts'.""")
    # nptyping constraint: numeric array with up to three unbounded axes
    # (events, samples, channels).
    array: Optional[NDArray[Shape["* num_events, * num_samples, * num_channels"], Number]] = Field(None)
class SpikeEventSeriesDataArray(Arraylike):
num_events: float = Field(...)
num_samples: float = Field(...)
num_channels: Optional[float] = Field(None)
class SpikeEventSeriesTimestamps(ConfiguredBaseModel):
"""
Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.
"""
interval: Optional[int] = Field(None, description="""Value is '1'""")
unit: Optional[str] = Field(None, description="""Unit of measurement for timestamps, which is fixed to 'seconds'.""")
timestamps: List[float] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""")
# Un-nested 1-D datasets of FeatureExtraction: vectors become Python lists.
class FeatureExtractionDescription(ConfiguredBaseModel):
    """
    Description of features (eg, ''PC1'') for each of the extracted features.
    """
    description: List[str] = Field(default_factory=list, description="""Description of features (eg, ''PC1'') for each of the extracted features.""")

class FeatureExtractionFeatures(ConfiguredBaseModel):
    """
    Multi-dimensional array of features extracted from each event.
    """
    array: Optional[NDArray[Shape["* num_events, * num_channels, * num_features"], Float32]] = Field(None)

# Arraylike companion: one field per named dimension of the features array.
class FeatureExtractionFeaturesArray(Arraylike):
    num_events: Optional[float] = Field(None)
    num_channels: Optional[float] = Field(None)
    num_features: Optional[float] = Field(None)

class FeatureExtractionTimes(ConfiguredBaseModel):
    """
    Times of events that features correspond to (can be a link).
    """
    times: List[float] = Field(default_factory=list, description="""Times of events that features correspond to (can be a link).""")
class FeatureExtractionElectrodes(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the electrodes that this time series was generated from.
    """
    # Reference to the DynamicTable whose rows this region selects.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # FIX: the generator emitted `NDArray[Shape[...], ]` -- a one-element tuple
    # subscript with no dtype, which nptyping's NDArray rejects (it requires a
    # (Shape, dtype) pair). `Any` is nptyping's explicit any-dtype wildcard.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
# Un-nested 1-D datasets of EventDetection.
class EventDetectionSourceIdx(ConfiguredBaseModel):
    """
    Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.
    """
    source_idx: List[int] = Field(default_factory=list, description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""")

class EventDetectionTimes(ConfiguredBaseModel):
    """
    Timestamps of events, in seconds.
    """
    # Fixed to 'seconds' by the schema; emitted as optional by the generator.
    unit: Optional[str] = Field(None, description="""Unit of measurement for event times, which is fixed to 'seconds'.""")
    times: List[float] = Field(default_factory=list, description="""Timestamps of events, in seconds.""")
# Nested 2-D datasets of the (deprecated in NWB 2.x) ClusterWaveforms type.
class ClusterWaveformsWaveformMean(ConfiguredBaseModel):
    """
    The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)
    """
    array: Optional[NDArray[Shape["* num_clusters, * num_samples"], Float32]] = Field(None)

# Arraylike companion: one field per named dimension.
class ClusterWaveformsWaveformMeanArray(Arraylike):
    num_clusters: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)

class ClusterWaveformsWaveformSd(ConfiguredBaseModel):
    """
    Stdev of waveforms for each cluster, using the same indices as in mean
    """
    array: Optional[NDArray[Shape["* num_clusters, * num_samples"], Float32]] = Field(None)

class ClusterWaveformsWaveformSdArray(Arraylike):
    num_clusters: Optional[float] = Field(None)
    num_samples: Optional[float] = Field(None)
# Un-nested 1-D datasets of the (deprecated in NWB 2.x) Clustering type.
class ClusteringNum(ConfiguredBaseModel):
    """
    Cluster number of each event
    """
    num: List[int] = Field(default_factory=list, description="""Cluster number of each event""")

class ClusteringPeakOverRms(ConfiguredBaseModel):
    """
    Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).
    """
    peak_over_rms: List[float] = Field(default_factory=list, description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""")

class ClusteringTimes(ConfiguredBaseModel):
    """
    Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.
    """
    times: List[float] = Field(default_factory=list, description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""")
# Resolve postponed (string) annotations now that every model is defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
for _model in (
    ElectricalSeriesData,
    ElectricalSeriesDataArray,
    ElectricalSeriesElectrodes,
    ElectricalSeriesChannelConversion,
    SpikeEventSeriesData,
    SpikeEventSeriesDataArray,
    SpikeEventSeriesTimestamps,
    FeatureExtractionDescription,
    FeatureExtractionFeatures,
    FeatureExtractionFeaturesArray,
    FeatureExtractionTimes,
    FeatureExtractionElectrodes,
    EventDetectionSourceIdx,
    EventDetectionTimes,
    ClusterWaveformsWaveformMean,
    ClusterWaveformsWaveformMeanArray,
    ClusterWaveformsWaveformSd,
    ClusterWaveformsWaveformSdArray,
    ClusteringNum,
    ClusteringPeakOverRms,
    ClusteringTimes,
):
    _model.update_forward_refs()
del _model

View file

@ -0,0 +1,60 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_epoch_include import (
TimeIntervalsTimeseriesIndex,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries
)
from .hdmf_common_table import (
DynamicTable
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Pydantic v1 models are slotted and do not support weak references by
    # default; adding '__weakref__' to __slots__ restores weakref-ability.
    __slots__ = '__weakref__'

# Shared base for every generated model in this module; pydantic v1 class
# kwargs set the model Config (re-validate on assignment, validate all
# fields, forbid unknown fields, allow arbitrary field types, etc.).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                          validate_assignment = True,
                          validate_all = True,
                          underscore_attrs_are_private = True,
                          extra = 'forbid',
                          arbitrary_types_allowed = True,
                          use_enum_values = True):
    pass
class TimeIntervals(DynamicTable):
    """
    A container for aggregating epoch data and the TimeSeries that each epoch applies to.
    """
    # Column vectors are un-nested into plain lists; indexed/compound columns
    # keep their generated include-file model types.
    start_time: Optional[List[float]] = Field(default_factory=list, description="""Start time of epoch, in seconds.""")
    stop_time: Optional[List[float]] = Field(default_factory=list, description="""Stop time of epoch, in seconds.""")
    tags: Optional[List[str]] = Field(default_factory=list, description="""User-defined tags that identify or categorize events.""")
    tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
    timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
    timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # NOTE(review): DynamicTableId and VectorData are not imported in this
    # module (only DynamicTable is) -- confirm these forward refs resolve
    # when update_forward_refs() runs, or the generator's imports need fixing.
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs
# Resolve postponed (string) annotations now that all models are defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
TimeIntervals.update_forward_refs()

View file

@ -0,0 +1,70 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeriesReferenceVectorData
)
from .hdmf_common_table import (
VectorIndex
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Pydantic v1 models are slotted and do not support weak references by
    # default; adding '__weakref__' to __slots__ restores weakref-ability.
    __slots__ = '__weakref__'

# Shared base for every generated model in this module; pydantic v1 class
# kwargs set the model Config (re-validate on assignment, validate all
# fields, forbid unknown fields, allow arbitrary field types, etc.).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                          validate_assignment = True,
                          validate_all = True,
                          underscore_attrs_are_private = True,
                          extra = 'forbid',
                          arbitrary_types_allowed = True,
                          use_enum_values = True):
    pass
class TimeIntervalsTagsIndex(VectorIndex):
    """
    Index for tags.
    """
    # NOTE(review): VectorData is not imported in this module -- confirm the
    # forward ref resolves when update_forward_refs() runs.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # FIX: the generator emitted `NDArray[Shape[...], ]` -- a one-element tuple
    # subscript with no dtype, which nptyping's NDArray rejects (it requires a
    # (Shape, dtype) pair). `Any` is nptyping's explicit any-dtype wildcard.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData):
    """
    An index into a TimeSeries object.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # FIX: the generator emitted `NDArray[Shape[...], ]` -- a one-element tuple
    # subscript with no dtype, which nptyping's NDArray rejects (it requires a
    # (Shape, dtype) pair). `Any` is nptyping's explicit any-dtype wildcard.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class TimeIntervalsTimeseriesIndex(VectorIndex):
    """
    Index for timeseries.
    """
    # NOTE(review): VectorData is not imported in this module -- confirm the
    # forward ref resolves when update_forward_refs() runs.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # FIX: the generator emitted `NDArray[Shape[...], ]` -- a one-element tuple
    # subscript with no dtype, which nptyping's NDArray rejects (it requires a
    # (Shape, dtype) pair). `Any` is nptyping's explicit any-dtype wildcard.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
# Resolve postponed (string) annotations now that every model is defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
for _cls in (
    TimeIntervalsTagsIndex,
    TimeIntervalsTimeseries,
    TimeIntervalsTimeseriesIndex,
):
    _cls.update_forward_refs()
del _cls

View file

@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_file_include import (
NWBFileScratch,
NWBFileGeneral,
NWBFileIntervals,
NWBFileFileCreateDate,
NWBFileUnits,
SubjectAge,
NWBFileStimulus,
NWBFileAcquisition,
NWBFileProcessing,
NWBFileAnalysis
)
from .core_nwb_base import (
NWBContainer,
NWBData
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Pydantic v1 models are slotted and do not support weak references by
    # default; adding '__weakref__' to __slots__ restores weakref-ability.
    __slots__ = '__weakref__'

# Shared base for every generated model in this module; pydantic v1 class
# kwargs set the model Config (re-validate on assignment, validate all
# fields, forbid unknown fields, allow arbitrary field types, etc.).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                          validate_assignment = True,
                          validate_all = True,
                          underscore_attrs_are_private = True,
                          extra = 'forbid',
                          arbitrary_types_allowed = True,
                          use_enum_values = True):
    pass
# Wrapper for one-off datasets stored under /scratch.
class ScratchData(NWBData):
    """
    Any one-off datasets
    """
    notes: Optional[str] = Field(None, description="""Any notes the user has about the dataset being stored""")
# Root file type; top-level HDF5 groups are modeled as dedicated include-file
# classes (NWBFileAcquisition, NWBFileGeneral, ...) referenced by field type.
class NWBFile(NWBContainer):
    """
    An NWB file storing cellular-based neurophysiology data from a single experimental session.
    """
    nwb_version: Optional[str] = Field(None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""")
    file_create_date: NWBFileFileCreateDate = Field(..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""")
    identifier: str = Field(..., description="""A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files.""")
    session_description: str = Field(..., description="""A description of the experimental session and data in the file.""")
    # NOTE(review): annotated as `date` although the description calls for a
    # full date+time with timezone -- presumably should be `datetime`; confirm
    # against the generator's isodatetime mapping.
    session_start_time: date = Field(..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""")
    timestamps_reference_time: date = Field(..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""")
    acquisition: NWBFileAcquisition = Field(..., description="""Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis.""")
    analysis: NWBFileAnalysis = Field(..., description="""Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs.""")
    scratch: Optional[NWBFileScratch] = Field(None, description="""A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard.""")
    processing: NWBFileProcessing = Field(..., description="""The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis.""")
    stimulus: NWBFileStimulus = Field(..., description="""Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file.""")
    general: NWBFileGeneral = Field(..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""")
    intervals: Optional[NWBFileIntervals] = Field(None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""")
    units: Optional[NWBFileUnits] = Field(None, description="""Data about sorted spike units.""")
class LabMetaData(NWBContainer):
    """
    Lab-specific meta-data.
    """
    # FIX: the generator emitted a bare `None` expression statement as the
    # class body; `pass` is the idiomatic, intent-revealing empty body and is
    # behaviorally identical (both are no-ops after the docstring).
    pass
# Subject metadata; all fields are free-text per the NWB schema except the
# structured age (include-file model) and ISO date of birth.
class Subject(NWBContainer):
    """
    Information about the animal or person from which the data was measured.
    """
    age: Optional[SubjectAge] = Field(None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""")
    date_of_birth: Optional[date] = Field(None, description="""Date of birth of subject. Can be supplied instead of 'age'.""")
    description: Optional[str] = Field(None, description="""Description of subject and where subject came from (e.g., breeder, if animal).""")
    genotype: Optional[str] = Field(None, description="""Genetic strain. If absent, assume Wild Type (WT).""")
    sex: Optional[str] = Field(None, description="""Gender of subject.""")
    species: Optional[str] = Field(None, description="""Species of subject.""")
    strain: Optional[str] = Field(None, description="""Strain of subject.""")
    subject_id: Optional[str] = Field(None, description="""ID of animal/person used/participating in experiment (lab convention).""")
    weight: Optional[str] = Field(None, description="""Weight at time of experiment, at time of surgery and at other important times.""")
# Resolve postponed (string) annotations now that every model is defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
for _cls in (ScratchData, NWBFile, LabMetaData, Subject):
    _cls.update_forward_refs()
del _cls

View file

@ -0,0 +1,480 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_file import (
LabMetaData,
ScratchData,
Subject
)
from .core_nwb_misc import (
Units
)
from .core_nwb_base import (
TimeSeries,
Images,
NWBContainer,
NWBDataInterface,
ProcessingModule
)
from .core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
IntracellularElectrode,
SimultaneousRecordingsTable,
SweepTable,
SequentialRecordingsTable,
IntracellularRecordingsTable
)
from .core_nwb_epoch import (
TimeIntervals
)
from .core_nwb_ogen import (
OptogeneticStimulusSite
)
from .core_nwb_device import (
Device
)
from .core_nwb_ecephys import (
ElectrodeGroup
)
from .hdmf_common_table import (
DynamicTable
)
from .core_nwb_ophys import (
ImagingPlane
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Pydantic v1 models are slotted and do not support weak references by
    # default; adding '__weakref__' to __slots__ restores weakref-ability.
    __slots__ = '__weakref__'

# Shared base for every generated model in this module; pydantic v1 class
# kwargs set the model Config (re-validate on assignment, validate all
# fields, forbid unknown fields, allow arbitrary field types, etc.).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                          validate_assignment = True,
                          validate_all = True,
                          underscore_attrs_are_private = True,
                          extra = 'forbid',
                          arbitrary_types_allowed = True,
                          use_enum_values = True):
    pass
# Un-nested 1-D dataset: the file's creation/modification history as a list of dates.
class NWBFileFileCreateDate(ConfiguredBaseModel):
    """
    A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.
    """
    file_create_date: List[date] = Field(default_factory=list, description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""")
# Group models: fields named after the contained neurodata type collect all
# children of that type. NOTE(review): these PascalCase field names shadow the
# imported classes inside the class body; annotation strings are presumably
# resolved against module globals by update_forward_refs -- confirm.
class NWBFileAcquisition(ConfiguredBaseModel):
    """
    Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis.
    """
    NWBDataInterface: Optional[List[NWBDataInterface]] = Field(default_factory=list, description="""Acquired, raw data.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tabular data that is relevant to acquisition""")

class NWBFileAnalysis(ConfiguredBaseModel):
    """
    Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs.
    """
    NWBContainer: Optional[List[NWBContainer]] = Field(default_factory=list, description="""Custom analysis results.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Tabular data that is relevant to data stored in analysis""")
# /scratch group: one-off, non-standardized results, bucketed by type.
class NWBFileScratch(ConfiguredBaseModel):
    """
    A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard.
    """
    ScratchData: Optional[List[ScratchData]] = Field(default_factory=list, description="""Any one-off datasets""")
    NWBContainer: Optional[List[NWBContainer]] = Field(default_factory=list, description="""Any one-off containers""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""Any one-off tables""")

# /processing group: container for intermediate-analysis ProcessingModules.
class NWBFileProcessing(ConfiguredBaseModel):
    """
    The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis.
    """
    ProcessingModule: Optional[List[ProcessingModule]] = Field(default_factory=list, description="""Intermediate analysis of acquired data.""")
# /stimulus group and its two required subgroups (defined just below;
# forward refs resolved at the bottom of the module).
class NWBFileStimulus(ConfiguredBaseModel):
    """
    Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file.
    """
    presentation: NWBFileStimulusPresentation = Field(..., description="""Stimuli presented during the experiment.""")
    templates: NWBFileStimulusTemplates = Field(..., description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""")

class NWBFileStimulusPresentation(ConfiguredBaseModel):
    """
    Stimuli presented during the experiment.
    """
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries objects containing data of presented stimuli.""")

class NWBFileStimulusTemplates(ConfiguredBaseModel):
    """
    Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.
    """
    TimeSeries: Optional[List[TimeSeries]] = Field(default_factory=list, description="""TimeSeries objects containing template data of presented stimuli.""")
    Images: Optional[List[Images]] = Field(default_factory=list, description="""Images objects containing images of presented stimuli.""")
# /general group: free-text experimental metadata plus typed subgroups for
# devices, subject, and modality-specific (ephys/ogen/ophys) metadata.
class NWBFileGeneral(ConfiguredBaseModel):
    """
    Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.
    """
    data_collection: Optional[str] = Field(None, description="""Notes about data collection and analysis.""")
    experiment_description: Optional[str] = Field(None, description="""General description of the experiment.""")
    experimenter: Optional[NWBFileGeneralExperimenter] = Field(None, description="""Name of person(s) who performed the experiment. Can also specify roles of different people involved.""")
    institution: Optional[str] = Field(None, description="""Institution(s) where experiment was performed.""")
    keywords: Optional[NWBFileGeneralKeywords] = Field(None, description="""Terms to search over.""")
    lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
    notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
    pharmacology: Optional[str] = Field(None, description="""Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.""")
    protocol: Optional[str] = Field(None, description="""Experimental protocol, if applicable. e.g., include IACUC protocol number.""")
    related_publications: Optional[NWBFileGeneralRelatedPublications] = Field(None, description="""Publication information. PMID, DOI, URL, etc.""")
    session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
    slices: Optional[str] = Field(None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""")
    source_script: Optional[NWBFileGeneralSourceScript] = Field(None, description="""Script file or link to public source code used to create this NWB file.""")
    stimulus: Optional[str] = Field(None, description="""Notes about stimuli, such as how and where they were presented.""")
    surgery: Optional[str] = Field(None, description="""Narrative description about surgery/surgeries, including date(s) and who performed surgery.""")
    virus: Optional[str] = Field(None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""")
    # PascalCase field collects all LabMetaData-typed children of this group.
    LabMetaData: Optional[List[LabMetaData]] = Field(default_factory=list, description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""")
    devices: Optional[NWBFileGeneralDevices] = Field(None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""")
    subject: Optional[NWBFileGeneralSubject] = Field(None, description="""Information about the animal or person from which the data was measured.""")
    extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(None, description="""Metadata related to extracellular electrophysiology.""")
    intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(None, description="""Metadata related to intracellular electrophysiology.""")
    optogenetics: Optional[NWBFileGeneralOptogenetics] = Field(None, description="""Metadata describing optogenetic stimuluation.""")
    optophysiology: Optional[NWBFileGeneralOptophysiology] = Field(None, description="""Metadata related to optophysiology.""")
# Generated subclass for the NWBFile /general/experimenter dataset; the 1-D
# vector contents are un-nested into a plain Python list field.
class NWBFileGeneralExperimenter(ConfiguredBaseModel):
    """
    Name of person(s) who performed the experiment. Can also specify roles of different people involved.
    """
    # One entry per experimenter; empty list by default.
    experimenter: Optional[List[str]] = Field(default_factory=list, description="""Name of person(s) who performed the experiment. Can also specify roles of different people involved.""")
# Generated subclass for the NWBFile /general/keywords dataset; the 1-D
# vector contents are un-nested into a plain Python list field.
class NWBFileGeneralKeywords(ConfiguredBaseModel):
    """
    Terms to search over.
    """
    # Free-text keywords; empty list by default.
    keywords: Optional[List[str]] = Field(default_factory=list, description="""Terms to search over.""")
# Generated subclass for the NWBFile /general/related_publications dataset;
# the 1-D vector contents are un-nested into a plain Python list field.
class NWBFileGeneralRelatedPublications(ConfiguredBaseModel):
    """
    Publication information. PMID, DOI, URL, etc.
    """
    related_publications: Optional[List[str]] = Field(default_factory=list, description="""Publication information. PMID, DOI, URL, etc.""")
# Generated subclass for the NWBFile /general/source_script dataset; the
# scalar `file_name` attribute is lifted onto the model directly.
class NWBFileGeneralSourceScript(ConfiguredBaseModel):
    """
    Script file or link to public source code used to create this NWB file.
    """
    file_name: Optional[str] = Field(None, description="""Name of script file.""")
# Generated subclass for the NWBFile /general/devices group: a container
# holding zero or more Device objects.
class NWBFileGeneralDevices(ConfiguredBaseModel):
    """
    Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.
    """
    # NOTE: field name matches the contained neurodata type (Device), hence the
    # non-snake_case capitalization — it is generated, not hand-written.
    Device: Optional[List[Device]] = Field(default_factory=list, description="""Data acquisition devices.""")
# Generated override of Subject for the NWBFile /general/subject group.
# Redeclares every inherited field so the generated schema is self-contained.
class NWBFileGeneralSubject(Subject):
    """
    Information about the animal or person from which the data was measured.
    """
    # `age` is modeled as its own generated class because the dataset carries a
    # `reference` attribute (see SubjectAge).
    age: Optional[SubjectAge] = Field(None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""")
    date_of_birth: Optional[date] = Field(None, description="""Date of birth of subject. Can be supplied instead of 'age'.""")
    description: Optional[str] = Field(None, description="""Description of subject and where subject came from (e.g., breeder, if animal).""")
    genotype: Optional[str] = Field(None, description="""Genetic strain. If absent, assume Wild Type (WT).""")
    sex: Optional[str] = Field(None, description="""Gender of subject.""")
    species: Optional[str] = Field(None, description="""Species of subject.""")
    strain: Optional[str] = Field(None, description="""Strain of subject.""")
    subject_id: Optional[str] = Field(None, description="""ID of animal/person used/participating in experiment (lab convention).""")
    weight: Optional[str] = Field(None, description="""Weight at time of experiment, at time of surgery and at other important times.""")
# Generated subclass for the NWBFile /general/extracellular_ephys group.
class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
    """
    Metadata related to extracellular electrophysiology.
    """
    # Capitalized field name mirrors the contained neurodata type (generated).
    ElectrodeGroup: Optional[List[ElectrodeGroup]] = Field(default_factory=list, description="""Physical group of electrodes.""")
    electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(None, description="""A table of all electrodes (i.e. channels) used for recording.""")
# Generated DynamicTable subclass for /general/extracellular_ephys/electrodes.
# Each per-electrode column is un-nested into a plain list field; list indices
# across columns refer to the same row (standard DynamicTable alignment —
# enforced elsewhere, not by this model).
class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
    """
    A table of all electrodes (i.e. channels) used for recording.
    """
    x: Optional[List[float]] = Field(default_factory=list, description="""x coordinate of the channel location in the brain (+x is posterior).""")
    y: Optional[List[float]] = Field(default_factory=list, description="""y coordinate of the channel location in the brain (+y is inferior).""")
    z: Optional[List[float]] = Field(default_factory=list, description="""z coordinate of the channel location in the brain (+z is right).""")
    imp: Optional[List[float]] = Field(default_factory=list, description="""Impedance of the channel, in ohms.""")
    location: Optional[List[str]] = Field(default_factory=list, description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
    filtering: Optional[List[str]] = Field(default_factory=list, description="""Description of hardware filtering, including the filter name and frequency cutoffs.""")
    group: Optional[List[ElectrodeGroup]] = Field(default_factory=list, description="""Reference to the ElectrodeGroup this electrode is a part of.""")
    group_name: Optional[List[str]] = Field(default_factory=list, description="""Name of the ElectrodeGroup this electrode is a part of.""")
    rel_x: Optional[List[float]] = Field(default_factory=list, description="""x coordinate in electrode group""")
    rel_y: Optional[List[float]] = Field(default_factory=list, description="""y coordinate in electrode group""")
    rel_z: Optional[List[float]] = Field(default_factory=list, description="""z coordinate in electrode group""")
    reference: Optional[List[str]] = Field(default_factory=list, description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated subclass for the NWBFile /general/intracellular_ephys group.
# Groups the (partly deprecated) intracellular-recording metadata tables.
class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
    """
    Metadata related to intracellular electrophysiology.
    """
    filtering: Optional[str] = Field(None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""")
    # Capitalized field name mirrors the contained neurodata type (generated).
    IntracellularElectrode: Optional[List[IntracellularElectrode]] = Field(default_factory=list, description="""An intracellular electrode.""")
    sweep_table: Optional[NWBFileGeneralIntracellularEphysSweepTable] = Field(None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""")
    intracellular_recordings: Optional[NWBFileGeneralIntracellularEphysIntracellularRecordings] = Field(None, description="""A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.""")
    simultaneous_recordings: Optional[NWBFileGeneralIntracellularEphysSimultaneousRecordings] = Field(None, description="""A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes""")
    sequential_recordings: Optional[NWBFileGeneralIntracellularEphysSequentialRecordings] = Field(None, description="""A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence.""")
    repetitions: Optional[NWBFileGeneralIntracellularEphysRepetitions] = Field(None, description="""A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.""")
    experimental_conditions: Optional[NWBFileGeneralIntracellularEphysExperimentalConditions] = Field(None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""")
# Generated override of SweepTable for /general/intracellular_ephys/sweep_table.
class NWBFileGeneralIntracellularEphysSweepTable(SweepTable):
    """
    [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.
    """
    sweep_number: Optional[List[int]] = Field(default_factory=list, description="""Sweep number of the PatchClampSeries in that row.""")
    series: Optional[List[PatchClampSeries]] = Field(default_factory=list, description="""The PatchClampSeries with the sweep number in that row.""")
    series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of IntracellularRecordingsTable for
# /general/intracellular_ephys/intracellular_recordings.
class NWBFileGeneralIntracellularEphysIntracellularRecordings(IntracellularRecordingsTable):
    """
    A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.
    """
    description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""")
    # The three required category sub-tables of this AlignedDynamicTable.
    electrodes: IntracellularRecordingsTableElectrodes = Field(..., description="""Table for storing intracellular electrode related metadata.""")
    stimuli: IntracellularRecordingsTableStimuli = Field(..., description="""Table for storing intracellular stimulus related metadata.""")
    responses: IntracellularRecordingsTableResponses = Field(..., description="""Table for storing intracellular response related metadata.""")
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # Capitalized field name mirrors the contained neurodata type (generated).
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of SimultaneousRecordingsTable for
# /general/intracellular_ephys/simultaneous_recordings.
class NWBFileGeneralIntracellularEphysSimultaneousRecordings(SimultaneousRecordingsTable):
    """
    A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes
    """
    recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""")
    recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of SequentialRecordingsTable for
# /general/intracellular_ephys/sequential_recordings.
class NWBFileGeneralIntracellularEphysSequentialRecordings(SequentialRecordingsTable):
    """
    A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence.
    """
    simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""")
    simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""")
    stimulus_type: Optional[List[str]] = Field(default_factory=list, description="""The type of stimulus used for the sequential recording.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of RepetitionsTable for
# /general/intracellular_ephys/repetitions.
class NWBFileGeneralIntracellularEphysRepetitions(RepetitionsTable):
    """
    A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.
    """
    sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""")
    sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of ExperimentalConditionsTable for
# /general/intracellular_ephys/experimental_conditions.
class NWBFileGeneralIntracellularEphysExperimentalConditions(ExperimentalConditionsTable):
    """
    A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.
    """
    repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""")
    repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated subclass for the NWBFile /general/optogenetics group.
class NWBFileGeneralOptogenetics(ConfiguredBaseModel):
    """
    Metadata describing optogenetic stimuluation.
    """
    # Capitalized field name mirrors the contained neurodata type (generated).
    OptogeneticStimulusSite: Optional[List[OptogeneticStimulusSite]] = Field(default_factory=list, description="""An optogenetic stimulation site.""")
# Generated subclass for the NWBFile /general/optophysiology group.
class NWBFileGeneralOptophysiology(ConfiguredBaseModel):
    """
    Metadata related to optophysiology.
    """
    # Capitalized field name mirrors the contained neurodata type (generated).
    ImagingPlane: Optional[List[ImagingPlane]] = Field(default_factory=list, description="""An imaging plane.""")
# Generated subclass for the NWBFile /intervals group: the three named
# TimeIntervals tables plus any additional user-defined ones.
class NWBFileIntervals(ConfiguredBaseModel):
    """
    Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
    """
    epochs: Optional[NWBFileIntervalsEpochs] = Field(None, description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""")
    trials: Optional[NWBFileIntervalsTrials] = Field(None, description="""Repeated experimental events that have a logical grouping.""")
    invalid_times: Optional[NWBFileIntervalsInvalidTimes] = Field(None, description="""Time intervals that should be removed from analysis.""")
    # Capitalized field name mirrors the contained neurodata type (generated).
    TimeIntervals: Optional[List[TimeIntervals]] = Field(default_factory=list, description="""Optional additional table(s) for describing other experimental time intervals.""")
# Generated override of TimeIntervals for /intervals/epochs; scalar columns
# are un-nested into plain list fields.
class NWBFileIntervalsEpochs(TimeIntervals):
    """
    Divisions in time marking experimental stages or sub-divisions of a single recording session.
    """
    start_time: Optional[List[float]] = Field(default_factory=list, description="""Start time of epoch, in seconds.""")
    stop_time: Optional[List[float]] = Field(default_factory=list, description="""Stop time of epoch, in seconds.""")
    tags: Optional[List[str]] = Field(default_factory=list, description="""User-defined tags that identify or categorize events.""")
    tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
    timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
    timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of TimeIntervals for /intervals/trials.
# NOTE(review): body is identical to NWBFileIntervalsEpochs — a consequence
# of the generator redeclaring inherited columns per subgroup.
class NWBFileIntervalsTrials(TimeIntervals):
    """
    Repeated experimental events that have a logical grouping.
    """
    start_time: Optional[List[float]] = Field(default_factory=list, description="""Start time of epoch, in seconds.""")
    stop_time: Optional[List[float]] = Field(default_factory=list, description="""Stop time of epoch, in seconds.""")
    tags: Optional[List[str]] = Field(default_factory=list, description="""User-defined tags that identify or categorize events.""")
    tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
    timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
    timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of TimeIntervals for /intervals/invalid_times.
class NWBFileIntervalsInvalidTimes(TimeIntervals):
    """
    Time intervals that should be removed from analysis.
    """
    start_time: Optional[List[float]] = Field(default_factory=list, description="""Start time of epoch, in seconds.""")
    stop_time: Optional[List[float]] = Field(default_factory=list, description="""Stop time of epoch, in seconds.""")
    tags: Optional[List[str]] = Field(default_factory=list, description="""User-defined tags that identify or categorize events.""")
    tags_index: Optional[TimeIntervalsTagsIndex] = Field(None, description="""Index for tags.""")
    timeseries: Optional[TimeIntervalsTimeseries] = Field(None, description="""An index into a TimeSeries object.""")
    timeseries_index: Optional[TimeIntervalsTimeseriesIndex] = Field(None, description="""Index for timeseries.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated override of Units for the NWBFile /units group (sorted spike
# units and their ragged spike-time / waveform columns).
class NWBFileUnits(Units):
    """
    Data about sorted spike units.
    """
    spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
    spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
    obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
    obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
    electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
    electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
    electrode_group: Optional[List[ElectrodeGroup]] = Field(default_factory=list, description="""Electrode group that each spike unit came from.""")
    waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
    waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
    waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
    waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
    waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
    # Inherited DynamicTable machinery, redeclared by the generator.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Generated model for the Subject `age` dataset: the dataset's `reference`
# attribute is lifted onto the model. The age value itself is presumably
# carried elsewhere by the generator — TODO confirm against Subject.
class SubjectAge(ConfiguredBaseModel):
    """
    Age of subject. Can be supplied instead of 'date_of_birth'.
    """
    reference: Optional[str] = Field(None, description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""")
# Resolve forward references left as strings by `from __future__ import
# annotations` / self-referential models (pydantic v1 requirement).
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
NWBFileFileCreateDate.update_forward_refs()
NWBFileAcquisition.update_forward_refs()
NWBFileAnalysis.update_forward_refs()
NWBFileScratch.update_forward_refs()
NWBFileProcessing.update_forward_refs()
NWBFileStimulus.update_forward_refs()
NWBFileStimulusPresentation.update_forward_refs()
NWBFileStimulusTemplates.update_forward_refs()
NWBFileGeneral.update_forward_refs()
NWBFileGeneralExperimenter.update_forward_refs()
NWBFileGeneralKeywords.update_forward_refs()
NWBFileGeneralRelatedPublications.update_forward_refs()
NWBFileGeneralSourceScript.update_forward_refs()
NWBFileGeneralDevices.update_forward_refs()
NWBFileGeneralSubject.update_forward_refs()
NWBFileGeneralExtracellularEphys.update_forward_refs()
NWBFileGeneralExtracellularEphysElectrodes.update_forward_refs()
NWBFileGeneralIntracellularEphys.update_forward_refs()
NWBFileGeneralIntracellularEphysSweepTable.update_forward_refs()
NWBFileGeneralIntracellularEphysIntracellularRecordings.update_forward_refs()
NWBFileGeneralIntracellularEphysSimultaneousRecordings.update_forward_refs()
NWBFileGeneralIntracellularEphysSequentialRecordings.update_forward_refs()
NWBFileGeneralIntracellularEphysRepetitions.update_forward_refs()
NWBFileGeneralIntracellularEphysExperimentalConditions.update_forward_refs()
NWBFileGeneralOptogenetics.update_forward_refs()
NWBFileGeneralOptophysiology.update_forward_refs()
NWBFileIntervals.update_forward_refs()
NWBFileIntervalsEpochs.update_forward_refs()
NWBFileIntervalsTrials.update_forward_refs()
NWBFileIntervalsInvalidTimes.update_forward_refs()
NWBFileUnits.update_forward_refs()
SubjectAge.update_forward_refs()

View file

@ -0,0 +1,327 @@
from __future__ import annotations

# --- standard library ---
import sys
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union

# --- third-party ---
from pydantic import BaseModel as BaseModel, Field
# nptyping scalar/array types used by generated NDArray annotations.
# The generator previously emitted duplicate names here (`Unicode` three
# times, `String` twice); the list below is deduplicated.
from nptyping import (
    NDArray,
    Shape,
    Float,
    Float32,
    Double,
    Float64,
    LongLong,
    Int64,
    Int,
    Int32,
    Int16,
    Short,
    Int8,
    UInt,
    UInt32,
    UInt16,
    UInt8,
    UInt64,
    Number,
    String,
    Unicode,
    Bool,
    Datetime64,
)

# `Literal` is only in `typing` from Python 3.8 on; fall back to the backport.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

# --- local generated modules ---
from .core_nwb_icephys_include import (
    IntracellularRecordingsTableResponses,
    VoltageClampSeriesResistanceCompBandwidth,
    RepetitionsTableSequentialRecordingsIndex,
    ExperimentalConditionsTableRepetitionsIndex,
    SequentialRecordingsTableSimultaneousRecordingsIndex,
    IntracellularResponsesTableResponse,
    VoltageClampSeriesWholeCellCapacitanceComp,
    CurrentClampStimulusSeriesData,
    IntracellularRecordingsTableElectrodes,
    RepetitionsTableSequentialRecordings,
    VoltageClampSeriesCapacitanceSlow,
    IntracellularStimuliTableStimulus,
    VoltageClampSeriesWholeCellSeriesResistanceComp,
    VoltageClampSeriesData,
    ExperimentalConditionsTableRepetitions,
    PatchClampSeriesData,
    VoltageClampSeriesResistanceCompPrediction,
    IntracellularRecordingsTableStimuli,
    CurrentClampSeriesData,
    SimultaneousRecordingsTableRecordingsIndex,
    SequentialRecordingsTableSimultaneousRecordings,
    VoltageClampStimulusSeriesData,
    VoltageClampSeriesResistanceCompCorrection,
    SweepTableSeriesIndex,
    VoltageClampSeriesCapacitanceFast,
    SimultaneousRecordingsTableRecordings
)
from .core_nwb_base import (
    TimeSeries,
    NWBContainer,
    DynamicTable
)
from .hdmf_common_table import (
    AlignedDynamicTable
)

# Versions are filled in by the generator; "None" means unversioned source schema.
metamodel_version = "None"
version = "None"
# Shim that re-enables weak references to model instances: pydantic v1's
# BaseModel uses __slots__ without '__weakref__', so subclasses must add it.
class WeakRefShimBaseModel(BaseModel):
    __slots__ = '__weakref__'
# Shared base for all generated models. Model configuration is passed as
# class keyword arguments (pydantic v1 class-kwargs config): re-validate on
# assignment, validate defaults, treat `_`-prefixed attributes as private,
# reject unknown fields, allow arbitrary (e.g. NDArray) types, and store
# enum members by value.
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
# Generated model for the NWB PatchClampSeries neurodata type. Despite the
# docstring calling it abstract, the schema language has no abstract flag, so
# it is emitted as a concrete model that concrete series subclass.
class PatchClampSeries(TimeSeries):
    """
    An abstract base class for patch-clamp data - stimulus or response, current or voltage.
    """
    stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    # Required, unlike the optional `data` on plain TimeSeries subclasses here.
    data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    # Inherited TimeSeries fields, redeclared by the generator.
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class CurrentClampSeries(PatchClampSeries):
    """
    Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected.
    """
    # NOTE(generated): fields inherited from PatchClampSeries/TimeSeries are re-declared
    # verbatim below; `data` narrows the parent field to CurrentClampSeriesData.
    data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
    bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
    bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
    capacitance_compensation: Optional[float] = Field(None, description="""Capacitance compensation, in farads.""")
    stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IZeroClampSeries(CurrentClampSeries):
    """
    Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell.
    """
    # NOTE(generated): bias_current/bridge_balance/capacitance_compensation become
    # required here (the schema descriptions fix them to 0.0), overriding the
    # optional declarations inherited from CurrentClampSeries.
    stimulus_description: Optional[str] = Field(None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""")
    bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
    bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
    capacitance_compensation: float = Field(..., description="""Capacitance compensation, in farads, fixed to 0.0.""")
    data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class CurrentClampStimulusSeries(PatchClampSeries):
    """
    Stimulus current applied during current clamp recording.
    """
    # NOTE(generated): inherited PatchClampSeries/TimeSeries fields re-declared
    # verbatim; `data` narrows the parent field to CurrentClampStimulusSeriesData.
    data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
    stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class VoltageClampSeries(PatchClampSeries):
    """
    Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected.
    """
    # NOTE(generated): each compensation setting is unrolled into its own
    # attribute-holder subclass (farads/ohms/hertz/percent units live on those).
    data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
    capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(None, description="""Fast capacitance, in farads.""")
    capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(None, description="""Slow capacitance, in farads.""")
    resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(None, description="""Resistance compensation bandwidth, in hertz.""")
    resistance_comp_correction: Optional[VoltageClampSeriesResistanceCompCorrection] = Field(None, description="""Resistance compensation correction, in percent.""")
    resistance_comp_prediction: Optional[VoltageClampSeriesResistanceCompPrediction] = Field(None, description="""Resistance compensation prediction, in percent.""")
    whole_cell_capacitance_comp: Optional[VoltageClampSeriesWholeCellCapacitanceComp] = Field(None, description="""Whole cell capacitance compensation, in farads.""")
    whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = Field(None, description="""Whole cell series resistance compensation, in ohms.""")
    stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class VoltageClampStimulusSeries(PatchClampSeries):
    """
    Stimulus voltage applied during a voltage clamp recording.
    """
    # NOTE(generated): inherited PatchClampSeries/TimeSeries fields re-declared
    # verbatim; `data` narrows the parent field to VoltageClampStimulusSeriesData.
    data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
    stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
    sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
    gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntracellularElectrode(NWBContainer):
    """
    An intracellular electrode and its metadata.
    """
    # All metadata fields are scalar strings; only `description` is required.
    cell_id: Optional[str] = Field(None, description="""unique ID of the cell""")
    description: str = Field(..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""")
    filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
    initial_access_resistance: Optional[str] = Field(None, description="""Initial access resistance.""")
    location: Optional[str] = Field(None, description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
    resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
    seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
    slice: Optional[str] = Field(None, description="""Information about slice used for recording.""")
class SweepTable(DynamicTable):
    """
    [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata.
    """
    # NOTE(review): `colnames` is generated as a scalar str here; the schema
    # presumably intends a list of column names — confirm against the generator.
    sweep_number: Optional[List[int]] = Field(default_factory=list, description="""Sweep number of the PatchClampSeries in that row.""")
    series: Optional[List[PatchClampSeries]] = Field(default_factory=list, description="""The PatchClampSeries with the sweep number in that row.""")
    series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularElectrodesTable(DynamicTable):
    """
    Table for storing intracellular electrode related metadata.
    """
    # Generated DynamicTable subclass: the 1-D `electrode` column is unrolled
    # into a plain Python list attribute.
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    electrode: Optional[List[IntracellularElectrode]] = Field(default_factory=list, description="""Column for storing the reference to the intracellular electrode.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularStimuliTable(DynamicTable):
    """
    Table for storing intracellular stimulus related metadata.
    """
    # Generated DynamicTable subclass; `stimulus` is a required reference column.
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularResponsesTable(DynamicTable):
    """
    Table for storing intracellular response related metadata.
    """
    # Generated DynamicTable subclass; `response` is a required reference column.
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularRecordingsTable(AlignedDynamicTable):
    """
    A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.
    """
    # The three required category sub-tables (electrodes/stimuli/responses) are
    # generated as dedicated subclasses of their respective table types.
    description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""")
    electrodes: IntracellularRecordingsTableElectrodes = Field(..., description="""Table for storing intracellular electrode related metadata.""")
    stimuli: IntracellularRecordingsTableStimuli = Field(..., description="""Table for storing intracellular stimulus related metadata.""")
    responses: IntracellularRecordingsTableResponses = Field(..., description="""Table for storing intracellular response related metadata.""")
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimultaneousRecordingsTable(DynamicTable):
    """
    A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes.
    """
    # Ragged column: `recordings` holds row references, `recordings_index`
    # is its VectorIndex.
    recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""")
    recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SequentialRecordingsTable(DynamicTable):
    """
    A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence.
    """
    # Ragged column (`simultaneous_recordings` + its index); `stimulus_type`
    # is a 1-D column unrolled into a plain list of strings.
    simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""")
    simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""")
    stimulus_type: Optional[List[str]] = Field(default_factory=list, description="""The type of stimulus used for the sequential recording.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class RepetitionsTable(DynamicTable):
    """
    A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.
    """
    # Ragged column: `sequential_recordings` + its VectorIndex.
    sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""")
    sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class ExperimentalConditionsTable(DynamicTable):
    """
    A table for grouping different intracellular recording repetitions together that belong to the same experimental condition.
    """
    # Ragged column: `repetitions` + its VectorIndex.
    repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""")
    repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
for _model in (
    PatchClampSeries,
    CurrentClampSeries,
    IZeroClampSeries,
    CurrentClampStimulusSeries,
    VoltageClampSeries,
    VoltageClampStimulusSeries,
    IntracellularElectrode,
    SweepTable,
    IntracellularElectrodesTable,
    IntracellularStimuliTable,
    IntracellularResponsesTable,
    IntracellularRecordingsTable,
    SimultaneousRecordingsTable,
    SequentialRecordingsTable,
    RepetitionsTable,
    ExperimentalConditionsTable,
):
    # Resolve string annotations now that every referenced class is defined.
    _model.update_forward_refs()

View file

@ -0,0 +1,293 @@
from __future__ import annotations

import sys
from datetime import date, datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from nptyping import (
    NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int,
    Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number,
    String, Unicode, Bool, Datetime64,
)
from pydantic import BaseModel as BaseModel, Field

if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

from .core_nwb_base import TimeSeriesReferenceVectorData
from .core_nwb_icephys import (
    IntracellularElectrode,
    IntracellularElectrodesTable,
    IntracellularRecordingsTable,
    IntracellularResponsesTable,
    IntracellularStimuliTable,
    RepetitionsTable,
    SequentialRecordingsTable,
    SimultaneousRecordingsTable,
)
from .hdmf_common_table import (
    DynamicTableId,
    DynamicTableRegion,
    VectorData,
    VectorIndex,
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim so generated models can be weakly referenced — presumably needed
    # because pydantic v1's slotted BaseModel omits '__weakref__'; confirm.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                          # pydantic v1 model config via class keyword args:
                          # strict validation, unknown fields forbidden.
                          validate_assignment = True,
                          validate_all = True,
                          underscore_attrs_are_private = True,
                          extra = 'forbid',
                          arbitrary_types_allowed = True,
                          use_enum_values = True):
    pass
class PatchClampSeriesData(ConfiguredBaseModel):
    """
    Recorded voltage or current.
    """
    # NOTE(generated): the 1-D dataset is un-nested into a plain list; the
    # `unit` attribute is lifted alongside it.
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
    data: List[float] = Field(default_factory=list, description="""Recorded voltage or current.""")
class CurrentClampSeriesData(ConfiguredBaseModel):
    """
    Recorded voltage.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'volts').
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
    """
    Stimulus current applied.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'amperes').
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class VoltageClampSeriesData(ConfiguredBaseModel):
    """
    Recorded current.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'amperes').
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
    """
    Fast capacitance, in farads.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'farads').
    unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
    """
    Slow capacitance, in farads.
    """
    # NOTE(review): description says "capacitance_fast" although this is the
    # slow-capacitance holder — presumably copied from the upstream NWB schema;
    # it is a runtime string mirroring the spec, so left untouched.
    unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
    """
    Resistance compensation bandwidth, in hertz.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'hertz').
    unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""")
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
    """
    Resistance compensation correction, in percent.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'percent').
    unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""")
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
    """
    Resistance compensation prediction, in percent.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'percent').
    unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""")
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
    """
    Whole cell capacitance compensation, in farads.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'farads').
    unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""")
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
    """
    Whole cell series resistance compensation, in ohms.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'ohms').
    unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""")
class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
    """
    Stimulus voltage applied.
    """
    # Attribute holder for the dataset's `unit` attribute (fixed to 'volts').
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class SweepTableSeriesIndex(VectorIndex):
    """
    Index for series.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: nptyping's NDArray requires a (Shape, dtype) pair; the generated
    # `NDArray[Shape[...], ]` subscript is a 1-tuple and fails at class
    # creation. Use Any as dtype until the generator emits a concrete one.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData):
    """
    Column storing the reference to the recorded stimulus for the recording (rows).
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData):
    """
    Column storing the reference to the recorded response for the recording (rows)
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class IntracellularRecordingsTableElectrodes(IntracellularElectrodesTable):
    """
    Table for storing intracellular electrode related metadata.
    """
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # 1D-vector column un-nested into a plain list of electrode references.
    electrode: Optional[List[IntracellularElectrode]] = Field(default_factory=list, description="""Column for storing the reference to the intracellular electrode.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # NOTE(review): field name shadows the VectorData type used in its own
    # annotation — presumably resolved via update_forward_refs; verify.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularRecordingsTableStimuli(IntracellularStimuliTable):
    """
    Table for storing intracellular stimulus related metadata.
    """
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # Required column: reference to the recorded stimulus per row.
    stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # NOTE(review): field name shadows the VectorData type in its annotation.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class IntracellularRecordingsTableResponses(IntracellularResponsesTable):
    """
    Table for storing intracellular response related metadata.
    """
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    # Required column: reference to the recorded response per row.
    response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # NOTE(review): field name shadows the VectorData type in its annotation.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
    """
    A reference to one or more rows in the IntracellularRecordingsTable table.
    """
    table: Optional[IntracellularRecordingsTable] = Field(None, description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class SimultaneousRecordingsTableRecordingsIndex(VectorIndex):
    """
    Index dataset for the recordings column.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
    """
    A reference to one or more rows in the SimultaneousRecordingsTable table.
    """
    table: Optional[SimultaneousRecordingsTable] = Field(None, description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex):
    """
    Index dataset for the simultaneous_recordings column.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class RepetitionsTableSequentialRecordings(DynamicTableRegion):
    """
    A reference to one or more rows in the SequentialRecordingsTable table.
    """
    table: Optional[SequentialRecordingsTable] = Field(None, description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class RepetitionsTableSequentialRecordingsIndex(VectorIndex):
    """
    Index dataset for the sequential_recordings column.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
    """
    A reference to one or more rows in the RepetitionsTable table.
    """
    table: Optional[RepetitionsTable] = Field(None, description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class ExperimentalConditionsTableRepetitionsIndex(VectorIndex):
    """
    Index dataset for the repetitions column.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: supply a dtype — `NDArray[Shape[...], ]` is an invalid 1-tuple
    # subscript for nptyping and raises at class-creation time.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolves the string annotations (enabled by `from __future__ import
# annotations`) now that every referenced model class exists.
PatchClampSeriesData.update_forward_refs()
CurrentClampSeriesData.update_forward_refs()
CurrentClampStimulusSeriesData.update_forward_refs()
VoltageClampSeriesData.update_forward_refs()
VoltageClampSeriesCapacitanceFast.update_forward_refs()
VoltageClampSeriesCapacitanceSlow.update_forward_refs()
VoltageClampSeriesResistanceCompBandwidth.update_forward_refs()
VoltageClampSeriesResistanceCompCorrection.update_forward_refs()
VoltageClampSeriesResistanceCompPrediction.update_forward_refs()
VoltageClampSeriesWholeCellCapacitanceComp.update_forward_refs()
VoltageClampSeriesWholeCellSeriesResistanceComp.update_forward_refs()
VoltageClampStimulusSeriesData.update_forward_refs()
SweepTableSeriesIndex.update_forward_refs()
IntracellularStimuliTableStimulus.update_forward_refs()
IntracellularResponsesTableResponse.update_forward_refs()
IntracellularRecordingsTableElectrodes.update_forward_refs()
IntracellularRecordingsTableStimuli.update_forward_refs()
IntracellularRecordingsTableResponses.update_forward_refs()
SimultaneousRecordingsTableRecordings.update_forward_refs()
SimultaneousRecordingsTableRecordingsIndex.update_forward_refs()
SequentialRecordingsTableSimultaneousRecordings.update_forward_refs()
SequentialRecordingsTableSimultaneousRecordingsIndex.update_forward_refs()
RepetitionsTableSequentialRecordings.update_forward_refs()
RepetitionsTableSequentialRecordingsIndex.update_forward_refs()
ExperimentalConditionsTableRepetitions.update_forward_refs()
ExperimentalConditionsTableRepetitionsIndex.update_forward_refs()

View file

@ -0,0 +1,152 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# from .core_nwb_image_include import (
# ImageSeriesData,
# OpticalSeriesFieldOfView,
# RGBAImageArray,
# IndexSeriesData,
# ImageSeriesDimension,
# OpticalSeriesData,
# ImageSeriesExternalFile,
# GrayscaleImageArray,
# RGBImageArray
# )
from .core_nwb_base import (
Image,
TimeSeries
)
# Schema/metamodel version stamps emitted by the generator; "None" here
# means the source linkml schema did not declare versions.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Adds weak-reference support to pydantic v1 models (BaseModel defines
    # __slots__ without '__weakref__').
    __slots__ = '__weakref__'
# Common base for all generated models: strict validation on assignment,
# no extra fields, arbitrary (e.g. nptyping) types allowed. Config is
# passed as class keyword arguments (pydantic v1 metaclass feature).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class GrayscaleImage(Image):
    """
    A grayscale image.
    """
    # Fix: in the NWB core schema GrayscaleImage extends Image, matching the
    # sibling RGBImage/RGBAImage classes in this file; inheriting
    # ConfiguredBaseModel dropped Image's inherited contract.
    array: Optional[NDArray[Shape["* x, * y"], Number]] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBImage(Image):
    """
    A color image.
    """
    # (x, y, 3) array — last axis is the fixed-size R,G,B channel dimension.
    array: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], Number]] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
class RGBAImage(Image):
    """
    A color image with transparency.
    """
    # (x, y, 4) array — last axis is the fixed-size R,G,B,A channel dimension.
    array: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], Number]] = Field(None)
    resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
    description: Optional[str] = Field(None, description="""Description of the image.""")
#
# class ImageSeries(TimeSeries):
# """
# General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z].
# """
# data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
# dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
# external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
# format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
# description: Optional[str] = Field(None, description="""Description of the time series.""")
# comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
# starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
# timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
# control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
# control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
# sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
#
#
# class ImageMaskSeries(ImageSeries):
# """
# An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed.
# """
# data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
# dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
# external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
# format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
# description: Optional[str] = Field(None, description="""Description of the time series.""")
# comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
# starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
# timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
# control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
# control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
# sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
#
#
# class OpticalSeries(ImageSeries):
# """
# Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important.
# """
# distance: Optional[float] = Field(None, description="""Distance from camera/monitor to target/eye.""")
# field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
# data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""")
# orientation: Optional[str] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""")
# dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
# external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
# format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
# description: Optional[str] = Field(None, description="""Description of the time series.""")
# comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
# starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
# timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
# control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
# control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
# sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
#
#
# class IndexSeries(TimeSeries):
# """
# Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
# """
# data: IndexSeriesData = Field(..., description="""Index of the image (using zero-indexing) in the linked Images object.""")
# description: Optional[str] = Field(None, description="""Description of the time series.""")
# comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
# starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
# timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
# control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
# control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
# sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
#
#
#
# # Update forward refs
# # see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# GrayscaleImage.update_forward_refs()
# RGBImage.update_forward_refs()
# RGBAImage.update_forward_refs()
# ImageSeries.update_forward_refs()
# ImageMaskSeries.update_forward_refs()
# OpticalSeries.update_forward_refs()
# IndexSeries.update_forward_refs()

View file

@ -0,0 +1,138 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .nwb_language import (
Arraylike
)
# Schema/metamodel version stamps emitted by the generator; "None" here
# means the source linkml schema did not declare versions.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Adds weak-reference support to pydantic v1 models (BaseModel defines
    # __slots__ without '__weakref__').
    __slots__ = '__weakref__'
# Common base for all generated models: strict validation on assignment,
# no extra fields, arbitrary (e.g. nptyping) types allowed. Config is
# passed as class keyword arguments (pydantic v1 metaclass feature).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
# Per-dimension description of GrayscaleImage.array; each field names one
# axis (both axes optional, i.e. any-length).
class GrayscaleImageArray(Arraylike):
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
# Per-dimension description of RGBImage.array (x, y, and the fixed-size
# R,G,B channel axis).
class RGBImageArray(Arraylike):
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
    r_g_b: Optional[float] = Field(None)
# Per-dimension description of RGBAImage.array (x, y, and the fixed-size
# R,G,B,A channel axis).
class RGBAImageArray(Arraylike):
    x: Optional[float] = Field(None)
    y: Optional[float] = Field(None)
    r_g_b_a: Optional[float] = Field(None)
class ImageSeriesData(ConfiguredBaseModel):
    """
    Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
    """
    # (frame, x, y[, z]) — the trailing z axis is optional per the schema;
    # nptyping cannot express "3-or-4-D", so a single 4-D shape is emitted.
    array: Optional[NDArray[Shape["* frame, * x, * y, * z"], Number]] = Field(None)
# Per-dimension description of ImageSeriesData.array; required fields are
# mandatory axes, Optional z marks the axis present only for volumetric data.
class ImageSeriesDataArray(Arraylike):
    frame: float = Field(...)
    x: float = Field(...)
    y: float = Field(...)
    z: Optional[float] = Field(None)
class ImageSeriesDimension(ConfiguredBaseModel):
    """
    Number of pixels on x, y, (and z) axes.
    """
    # 1D-vector dataset un-nested into a plain list of pixel counts.
    dimension: Optional[List[int]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
class ImageSeriesExternalFile(ConfiguredBaseModel):
    """
    Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
    """
    # NOTE(review): the description documents a *list* of frame offsets
    # ([0, 5, 15]) but the generated type is a scalar int — confirm whether
    # this should be List[int].
    starting_frame: Optional[int] = Field(None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""")
    # 1D-vector dataset un-nested into a plain list of file paths.
    external_file: Optional[List[str]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
class OpticalSeriesFieldOfView(ConfiguredBaseModel):
    """
    Width, height and depth of image, or imaged area, in meters.
    """
    # NOTE(review): the schema allows EITHER a length-2 (w, h) OR a length-3
    # (w, h, d) vector; the generated 2-D shape below conflates the two
    # alternatives into one array — confirm generator handling of shape unions.
    array: Optional[NDArray[Shape["2 width_height, 3 width_height_depth"], Float32]] = Field(None)
# Per-dimension description of OpticalSeriesFieldOfView.array; the two
# fields correspond to the alternative 2- and 3-element vector forms.
class OpticalSeriesFieldOfViewArray(Arraylike):
    width_height: Optional[float] = Field(None)
    width_height_depth: Optional[float] = Field(None)
class OpticalSeriesData(ConfiguredBaseModel):
    """
    Images presented to subject, either grayscale or RGB
    """
    # (frame, x, y[, 3]) — trailing RGB axis applies only to color data;
    # nptyping cannot express the 3-D grayscale alternative separately.
    array: Optional[NDArray[Shape["* frame, * x, * y, 3 r_g_b"], Number]] = Field(None)
# Per-dimension description of OpticalSeriesData.array; Optional r_g_b
# marks the channel axis present only for RGB data.
class OpticalSeriesDataArray(Arraylike):
    frame: float = Field(...)
    x: float = Field(...)
    y: float = Field(...)
    r_g_b: Optional[float] = Field(None)
class IndexSeriesData(ConfiguredBaseModel):
    """
    Index of the image (using zero-indexing) in the linked Images object.
    """
    # These TimeSeries data attributes are carried but unused by IndexSeries.
    conversion: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    resolution: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
    unit: Optional[str] = Field(None, description="""This field is unused by IndexSeries and has the value N/A.""")
    # 1D-vector dataset un-nested into a plain list of frame indices.
    data: List[int] = Field(default_factory=list, description="""Index of the image (using zero-indexing) in the linked Images object.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolves the string annotations (enabled by `from __future__ import
# annotations`) now that every referenced model class exists.
GrayscaleImageArray.update_forward_refs()
RGBImageArray.update_forward_refs()
RGBAImageArray.update_forward_refs()
ImageSeriesData.update_forward_refs()
ImageSeriesDataArray.update_forward_refs()
ImageSeriesDimension.update_forward_refs()
ImageSeriesExternalFile.update_forward_refs()
OpticalSeriesFieldOfView.update_forward_refs()
OpticalSeriesFieldOfViewArray.update_forward_refs()
OpticalSeriesData.update_forward_refs()
OpticalSeriesDataArray.update_forward_refs()
IndexSeriesData.update_forward_refs()

View file

@ -0,0 +1,155 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_misc_include import (
AbstractFeatureSeriesFeatures,
UnitsElectrodesIndex,
UnitsObsIntervalsIndex,
DecompositionSeriesData,
UnitsSpikeTimes,
UnitsWaveformMean,
UnitsWaveformsIndexIndex,
UnitsWaveforms,
UnitsWaveformsIndex,
AnnotationSeriesData,
UnitsObsIntervals,
AbstractFeatureSeriesFeatureUnits,
DecompositionSeriesBands,
UnitsWaveformSd,
UnitsElectrodes,
IntervalSeriesData,
DecompositionSeriesSourceChannels,
AbstractFeatureSeriesData,
UnitsSpikeTimesIndex
)
from .core_nwb_base import (
TimeSeries
)
from .core_nwb_ecephys import (
ElectrodeGroup
)
from .hdmf_common_table import (
DynamicTable
)
# Schema/metamodel version stamps emitted by the generator; "None" here
# means the source linkml schema did not declare versions.
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Adds weak-reference support to pydantic v1 models (BaseModel defines
    # __slots__ without '__weakref__').
    __slots__ = '__weakref__'
# Common base for all generated models: strict validation on assignment,
# no extra fields, arbitrary (e.g. nptyping) types allowed. Config is
# passed as class keyword arguments (pydantic v1 metaclass feature).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class AbstractFeatureSeries(TimeSeries):
    """
    Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
    """
    # NOTE(review): TimeSeriesStartingTime/TimeSeriesTimestamps/TimeSeriesControl/
    # TimeSeriesControlDescription/TimeSeriesSync are not among this module's visible
    # imports — presumably exported by core_nwb_base; confirm update_forward_refs() resolves them.
    data: AbstractFeatureSeriesData = Field(..., description="""Values of each feature at each time.""")
    feature_units: Optional[AbstractFeatureSeriesFeatureUnits] = Field(None, description="""Units of each feature.""")
    features: AbstractFeatureSeriesFeatures = Field(..., description="""Description of the features represented in TimeSeries::data.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class AnnotationSeries(TimeSeries):
    """
    Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
    """
    # NOTE(review): the TimeSeries* helper types below are not in this module's visible
    # imports — presumably exported by core_nwb_base; confirm before calling update_forward_refs().
    data: AnnotationSeriesData = Field(..., description="""Annotations made during an experiment.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class IntervalSeries(TimeSeries):
    """
    Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
    """
    # NOTE(review): the TimeSeries* helper types below are not in this module's visible
    # imports — presumably exported by core_nwb_base; confirm before calling update_forward_refs().
    data: IntervalSeriesData = Field(..., description="""Use values >0 if interval started, <0 if interval ended.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DecompositionSeries(TimeSeries):
    """
    Spectral analysis of a time series, e.g. of an LFP or a speech signal.
    """
    # NOTE(review): the TimeSeries* helper types below are not in this module's visible
    # imports — presumably exported by core_nwb_base; confirm before calling update_forward_refs().
    data: DecompositionSeriesData = Field(..., description="""Data decomposed into frequency bands.""")
    metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
    source_channels: Optional[DecompositionSeriesSourceChannels] = Field(None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""")
    bands: DecompositionSeriesBands = Field(..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class Units(DynamicTable):
    """
    Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
    """
    # NOTE(review): DynamicTableId and VectorData (used in annotations below) are not in
    # this module's visible imports (only DynamicTable is) — presumably exported by
    # hdmf_common_table; confirm update_forward_refs() resolves them.
    # NOTE(review): the last field is named `VectorData`, shadowing the class of the same
    # name — generator artifact carried over from the HDMF group name.
    spike_times_index: Optional[UnitsSpikeTimesIndex] = Field(None, description="""Index into the spike_times dataset.""")
    spike_times: Optional[UnitsSpikeTimes] = Field(None, description="""Spike times for each unit in seconds.""")
    obs_intervals_index: Optional[UnitsObsIntervalsIndex] = Field(None, description="""Index into the obs_intervals dataset.""")
    obs_intervals: Optional[UnitsObsIntervals] = Field(None, description="""Observation intervals for each unit.""")
    electrodes_index: Optional[UnitsElectrodesIndex] = Field(None, description="""Index into electrodes.""")
    electrodes: Optional[UnitsElectrodes] = Field(None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""")
    electrode_group: Optional[List[ElectrodeGroup]] = Field(default_factory=list, description="""Electrode group that each spike unit came from.""")
    waveform_mean: Optional[UnitsWaveformMean] = Field(None, description="""Spike waveform mean for each spike unit.""")
    waveform_sd: Optional[UnitsWaveformSd] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
    waveforms: Optional[UnitsWaveforms] = Field(None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""")
    waveforms_index: Optional[UnitsWaveformsIndex] = Field(None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""")
    waveforms_index_index: Optional[UnitsWaveformsIndexIndex] = Field(None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs so string annotations (enabled by
# `from __future__ import annotations`) resolve to the classes defined above.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
AbstractFeatureSeries.update_forward_refs()
AnnotationSeries.update_forward_refs()
IntervalSeries.update_forward_refs()
DecompositionSeries.update_forward_refs()
Units.update_forward_refs()

View file

@ -0,0 +1,277 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTableRegion,
DynamicTable,
VectorIndex,
VectorData
)
from .nwb_language import (
Arraylike
)
# Versions of the generating metamodel/schema; "None" when not recorded by the generator.
metamodel_version = "None"
version = "None"

class WeakRefShimBaseModel(BaseModel):
    """Shim base that re-adds ``__weakref__`` so instances can be weakly referenced
    (pydantic's BaseModel uses ``__slots__`` and omits it)."""
    __slots__ = '__weakref__'

class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """Common base for all generated models; shared pydantic (v1) configuration
    is passed as class keyword arguments (strict validation, no extra fields)."""
    pass
class AbstractFeatureSeriesData(ConfiguredBaseModel):
    """
    Values of each feature at each time.
    """
    # Scalar HDF5 attribute un-nested onto the model.
    unit: Optional[str] = Field(None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""")
    # 2D payload; shape labels mirror AbstractFeatureSeriesDataArray below.
    array: Optional[NDArray[Shape["* num_times, * num_features"], Number]] = Field(None)

class AbstractFeatureSeriesDataArray(Arraylike):
    """Dimension descriptor for AbstractFeatureSeriesData.array: num_times (required) x num_features (optional)."""
    num_times: float = Field(...)
    num_features: Optional[float] = Field(None)
class AbstractFeatureSeriesFeatureUnits(ConfiguredBaseModel):
    """
    Units of each feature.
    """
    # 1D vector un-nested into a plain list (optional dataset).
    feature_units: Optional[List[str]] = Field(default_factory=list, description="""Units of each feature.""")

class AbstractFeatureSeriesFeatures(ConfiguredBaseModel):
    """
    Description of the features represented in TimeSeries::data.
    """
    # 1D vector un-nested into a plain list (required dataset).
    features: List[str] = Field(default_factory=list, description="""Description of the features represented in TimeSeries::data.""")
class AnnotationSeriesData(ConfiguredBaseModel):
    """
    Annotations made during an experiment.
    """
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
    # 1D text vector un-nested into a plain list.
    data: List[str] = Field(default_factory=list, description="""Annotations made during an experiment.""")
class IntervalSeriesData(ConfiguredBaseModel):
    """
    Use values >0 if interval started, <0 if interval ended.
    """
    resolution: Optional[float] = Field(None, description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""")
    # 1D int8 vector (start/stop markers) un-nested into a plain list.
    data: List[int] = Field(default_factory=list, description="""Use values >0 if interval started, <0 if interval ended.""")
class DecompositionSeriesData(ConfiguredBaseModel):
    """
    Data decomposed into frequency bands.
    """
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""")
    # 3D payload; shape labels mirror DecompositionSeriesDataArray below.
    array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], Number]] = Field(None)

class DecompositionSeriesDataArray(Arraylike):
    """Dimension descriptor for DecompositionSeriesData.array (all dims optional)."""
    num_times: Optional[float] = Field(None)
    num_channels: Optional[float] = Field(None)
    num_bands: Optional[float] = Field(None)
class DecompositionSeriesSourceChannels(DynamicTableRegion):
    """
    DynamicTableRegion pointer to the channels that this decomposition series was generated from.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # NOTE(review): the NDArray subscript ends with a trailing comma and no dtype
    # (generic dim0..dim3 fallback) — generator gap; confirm nptyping accepts a
    # shape-only subscript before relying on validation here.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class DecompositionSeriesBands(DynamicTable):
    """
    Table for describing the bands that this series was generated from. There should be one row in this table for each band.
    """
    # NOTE(review): DynamicTableId is not in this module's visible imports — presumably
    # exported by hdmf_common_table; confirm update_forward_refs() resolves it.
    # NOTE(review): the last field is named `VectorData`, shadowing the imported class of
    # the same name — generator artifact carried over from the HDMF group name.
    band_name: Optional[List[str]] = Field(default_factory=list, description="""Name of the band, e.g. theta.""")
    band_limits: DecompositionSeriesBandsBandLimits = Field(..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""")
    band_mean: DecompositionSeriesBandsBandMean = Field(..., description="""The mean Gaussian filters, in Hz.""")
    band_stdev: DecompositionSeriesBandsBandStdev = Field(..., description="""The standard deviation of Gaussian filters, in Hz.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DecompositionSeriesBandsBandLimits(VectorData):
    """
    Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class DecompositionSeriesBandsBandMean(VectorData):
    """
    The mean Gaussian filters, in Hz.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class DecompositionSeriesBandsBandStdev(VectorData):
    """
    The standard deviation of Gaussian filters, in Hz.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class UnitsSpikeTimesIndex(VectorIndex):
    """
    Index into the spike_times dataset.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsSpikeTimes(VectorData):
    """
    Spike times for each unit in seconds.
    """
    resolution: Optional[float] = Field(None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class UnitsObsIntervalsIndex(VectorIndex):
    """
    Index into the obs_intervals dataset.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsObsIntervals(VectorData):
    """
    Observation intervals for each unit.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class UnitsElectrodesIndex(VectorIndex):
    """
    Index into electrodes.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsElectrodes(DynamicTableRegion):
    """
    Electrode that each spike unit came from, specified using a DynamicTableRegion.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class UnitsWaveformMean(VectorData):
    """
    Spike waveform mean for each spike unit.
    """
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsWaveformSd(VectorData):
    """
    Spike waveform standard deviation for each spike unit.
    """
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
class UnitsWaveforms(VectorData):
    """
    Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
    """
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    unit: Optional[str] = Field(None, description="""Unit of measurement. This value is fixed to 'volts'.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Generic dim0..dim3 fallback shape emitted when the schema gives no explicit dims.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsWaveformsIndex(VectorIndex):
    """
    Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)

class UnitsWaveformsIndexIndex(VectorIndex):
    """
    Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], ]] = Field(None)
# Update forward refs so string annotations (enabled by
# `from __future__ import annotations`) resolve to the classes defined above.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
AbstractFeatureSeriesData.update_forward_refs()
AbstractFeatureSeriesDataArray.update_forward_refs()
AbstractFeatureSeriesFeatureUnits.update_forward_refs()
AbstractFeatureSeriesFeatures.update_forward_refs()
AnnotationSeriesData.update_forward_refs()
IntervalSeriesData.update_forward_refs()
DecompositionSeriesData.update_forward_refs()
DecompositionSeriesDataArray.update_forward_refs()
DecompositionSeriesSourceChannels.update_forward_refs()
DecompositionSeriesBands.update_forward_refs()
DecompositionSeriesBandsBandLimits.update_forward_refs()
DecompositionSeriesBandsBandMean.update_forward_refs()
DecompositionSeriesBandsBandStdev.update_forward_refs()
UnitsSpikeTimesIndex.update_forward_refs()
UnitsSpikeTimes.update_forward_refs()
UnitsObsIntervalsIndex.update_forward_refs()
UnitsObsIntervals.update_forward_refs()
UnitsElectrodesIndex.update_forward_refs()
UnitsElectrodes.update_forward_refs()
UnitsWaveformMean.update_forward_refs()
UnitsWaveformSd.update_forward_refs()
UnitsWaveforms.update_forward_refs()
UnitsWaveformsIndex.update_forward_refs()
UnitsWaveformsIndexIndex.update_forward_refs()

View file

@ -0,0 +1,67 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_base import (
NWBContainer,
TimeSeries
)
from .core_nwb_ogen_include import (
OptogeneticSeriesData
)
# Versions of the generating metamodel/schema; "None" when not recorded by the generator.
metamodel_version = "None"
version = "None"

class WeakRefShimBaseModel(BaseModel):
    """Shim base that re-adds ``__weakref__`` so instances can be weakly referenced
    (pydantic's BaseModel uses ``__slots__`` and omits it)."""
    __slots__ = '__weakref__'

class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """Common base for all generated models; shared pydantic (v1) configuration
    is passed as class keyword arguments (strict validation, no extra fields)."""
    pass
class OptogeneticSeries(TimeSeries):
    """
    An optogenetic stimulus.
    """
    # NOTE(review): TimeSeriesStartingTime/TimeSeriesTimestamps/TimeSeriesControl/
    # TimeSeriesControlDescription/TimeSeriesSync are not among this module's visible
    # imports — presumably exported by core_nwb_base; confirm update_forward_refs() resolves them.
    data: OptogeneticSeriesData = Field(..., description="""Applied power for optogenetic stimulus, in watts.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class OptogeneticStimulusSite(NWBContainer):
    """
    A site of optogenetic stimulation.
    """
    # All three fields are required (Field(...)) per the NWB schema.
    description: str = Field(..., description="""Description of stimulation site.""")
    excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
    location: str = Field(..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
# Update forward refs
# Resolve postponed (PEP 563) annotations now that all models are defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
OptogeneticSeries.update_forward_refs()
OptogeneticStimulusSite.update_forward_refs()

View file

@ -0,0 +1,42 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Generator placeholders: the source schema declared no versions, so these
# are the literal string "None" (not the None singleton).
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim: declare a __weakref__ slot so instances of slotted pydantic
    # models can still be weakly referenced.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models. The class keywords above are
    # consumed as Config settings by pydantic v1's metaclass.
    pass
class OptogeneticSeriesData(ConfiguredBaseModel):
    """
    Applied power for optogenetic stimulus, in watts.
    """
    # 'unit' is fixed to 'watts' by the NWB schema but modeled as optional text.
    unit: Optional[str] = Field(None, description="""Unit of measurement for data, which is fixed to 'watts'.""")
    # 1-D vector data un-nested into a plain list of floats.
    data: List[float] = Field(default_factory=list, description="""Applied power for optogenetic stimulus, in watts.""")
# Update forward refs
# Resolve postponed (PEP 563) annotations now that all models are defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
OptogeneticSeriesData.update_forward_refs()

View file

@ -0,0 +1,207 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_ophys_include import (
CorrectedImageStackXyTranslation,
PlaneSegmentationVoxelMaskIndex,
ImagingPlaneOriginCoords,
TwoPhotonSeriesFieldOfView,
RoiResponseSeriesRois,
CorrectedImageStackCorrected,
ImagingPlaneManifold,
PlaneSegmentationImageMask,
PlaneSegmentationReferenceImages,
ImagingPlaneGridSpacing,
RoiResponseSeriesData,
PlaneSegmentationPixelMaskIndex
)
from .core_nwb_base import (
TimeSeries,
NWBContainer,
NWBDataInterface
)
from .hdmf_common_table import (
DynamicTable
)
from .core_nwb_image import (
ImageSeries
)
# Generator placeholders: the source schema declared no versions, so these
# are the literal string "None" (not the None singleton).
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim: declare a __weakref__ slot so instances of slotted pydantic
    # models can still be weakly referenced.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models. The class keywords above are
    # consumed as Config settings by pydantic v1's metaclass.
    pass
class OnePhotonSeries(ImageSeries):
    """
    Image stack recorded over time from 1-photon microscope.
    """
    # NOTE(review): ImageSeries*/TimeSeries* helper types referenced below are
    # not in this module's import list; presumably resolved when
    # update_forward_refs() runs — confirm the generator emits those imports.
    pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
    scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""")
    exposure_time: Optional[float] = Field(None, description="""Exposure time of the sample; often the inverse of the frequency.""")
    binning: Optional[int] = Field(None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""")
    power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
    intensity: Optional[float] = Field(None, description="""Intensity of the excitation in mW/mm^2, if known.""")
    data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class TwoPhotonSeries(ImageSeries):
    """
    Image stack recorded over time from 2-photon microscope.
    """
    # NOTE(review): ImageSeries*/TimeSeries* helper types referenced below are
    # not in this module's import list; presumably resolved when
    # update_forward_refs() runs — confirm the generator emits those imports.
    pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
    scan_line_rate: Optional[float] = Field(None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""")
    field_of_view: Optional[TwoPhotonSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
    data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class RoiResponseSeries(TimeSeries):
    """
    ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs.
    """
    # NOTE(review): TimeSeries* helper types referenced below are not in this
    # module's import list; presumably resolved at update_forward_refs() — confirm.
    data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
    rois: RoiResponseSeriesRois = Field(..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class DfOverF(NWBDataInterface):
    """
    dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
    """
    # Field deliberately shares its name with the RoiResponseSeries class
    # (generated container-of-type pattern); renaming would break the interface.
    RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing dF/F for a ROI.""")
class Fluorescence(NWBDataInterface):
    """
    Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes).
    """
    # Field deliberately shares its name with the RoiResponseSeries class
    # (generated container-of-type pattern); renaming would break the interface.
    RoiResponseSeries: List[RoiResponseSeries] = Field(default_factory=list, description="""RoiResponseSeries object(s) containing fluorescence data for a ROI.""")
class ImageSegmentation(NWBDataInterface):
    """
    Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them.
    """
    # Forward reference: PlaneSegmentation is defined later in this module and
    # is resolved lazily (PEP 563) by update_forward_refs().
    PlaneSegmentation: List[PlaneSegmentation] = Field(default_factory=list, description="""Results from image segmentation of a specific imaging plane.""")
class PlaneSegmentation(DynamicTable):
    """
    Results from image segmentation of a specific imaging plane.
    """
    # NOTE(review): DynamicTableId and VectorData are not in this module's
    # import list; presumably resolved at update_forward_refs() — confirm.
    image_mask: Optional[PlaneSegmentationImageMask] = Field(None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""")
    pixel_mask_index: Optional[PlaneSegmentationPixelMaskIndex] = Field(None, description="""Index into pixel_mask.""")
    pixel_mask: Optional[List[Any]] = Field(default_factory=list, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""")
    voxel_mask_index: Optional[PlaneSegmentationVoxelMaskIndex] = Field(None, description="""Index into voxel_mask.""")
    voxel_mask: Optional[List[Any]] = Field(default_factory=list, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""")
    reference_images: PlaneSegmentationReferenceImages = Field(..., description="""Image stacks that the segmentation masks apply to.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class ImagingPlane(NWBContainer):
    """
    An imaging plane and its metadata.
    """
    description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
    excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
    imaging_rate: Optional[float] = Field(None, description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""")
    indicator: str = Field(..., description="""Calcium indicator.""")
    location: str = Field(..., description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
    # 'manifold' is deprecated upstream in favor of origin_coords + grid_spacing.
    manifold: Optional[ImagingPlaneManifold] = Field(None, description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""")
    origin_coords: Optional[ImagingPlaneOriginCoords] = Field(None, description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""")
    grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(None, description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""")
    reference_frame: Optional[str] = Field(None, description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""")
    # Field deliberately shares its name with the OpticalChannel class
    # (generated container-of-type pattern); forward ref resolved lazily.
    OpticalChannel: List[OpticalChannel] = Field(default_factory=list, description="""An optical channel used to record from an imaging plane.""")
class OpticalChannel(NWBContainer):
    """
    An optical channel used to record from an imaging plane.
    """
    # Both fields are required (Field(...)) per the NWB schema.
    description: str = Field(..., description="""Description or other notes about the channel.""")
    emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
    """
    An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions).
    """
    # NOTE(review): "Reuslts" typo below comes from the upstream NWB schema
    # description; left as-is since this text is generated, not hand-written.
    CorrectedImageStack: List[CorrectedImageStack] = Field(default_factory=list, description="""Reuslts from motion correction of an image stack.""")
class CorrectedImageStack(NWBDataInterface):
    """
    Reuslts from motion correction of an image stack.
    """
    # Both sub-objects are required; their classes live in the include module.
    corrected: CorrectedImageStackCorrected = Field(..., description="""Image stack with frames shifted to the common coordinates.""")
    xy_translation: CorrectedImageStackXyTranslation = Field(..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""")
# Update forward refs
# Resolve postponed (PEP 563) annotations now that all models are defined.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
OnePhotonSeries.update_forward_refs()
TwoPhotonSeries.update_forward_refs()
RoiResponseSeries.update_forward_refs()
DfOverF.update_forward_refs()
Fluorescence.update_forward_refs()
ImageSegmentation.update_forward_refs()
PlaneSegmentation.update_forward_refs()
ImagingPlane.update_forward_refs()
OpticalChannel.update_forward_refs()
MotionCorrection.update_forward_refs()
CorrectedImageStack.update_forward_refs()

View file

@ -0,0 +1,212 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTableRegion,
VectorIndex,
VectorData
)
from .core_nwb_image import (
ImageSeries
)
from .nwb_language import (
Arraylike
)
from .core_nwb_base import (
TimeSeries
)
# Generator placeholders: the source schema declared no versions, so these
# are the literal string "None" (not the None singleton).
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    # Shim: declare a __weakref__ slot so instances of slotted pydantic
    # models can still be weakly referenced.
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    # Shared base for all generated models. The class keywords above are
    # consumed as Config settings by pydantic v1's metaclass.
    pass
class TwoPhotonSeriesFieldOfView(ConfiguredBaseModel):
    """
    Width, height and depth of image, or imaged area, in meters.
    """
    # Either a 2-vector (width, height) or a 3-vector (width, height, depth),
    # float32 per the NWB schema.
    array: Optional[NDArray[Shape["2 width|height, 3 width|height|depth"], Float32]] = Field(None)
class TwoPhotonSeriesFieldOfViewArray(Arraylike):
    """
    Per-dimension descriptors for TwoPhotonSeries.field_of_view.
    """
    # Fix: the NWB dimension names contain '|', which is not a valid Python
    # identifier — the generated `width|height: ...` was a SyntaxError.
    # Expose sanitized attribute names and preserve the schema names as
    # pydantic aliases so (de)serialization still matches the source schema.
    width_height: Optional[float] = Field(None, alias="width|height")
    width_height_depth: Optional[float] = Field(None, alias="width|height|depth")
class RoiResponseSeriesData(ConfiguredBaseModel):
    """
    Signals from ROIs.
    """
    # 2-D (time x ROI) or 1-D (time) numeric array; dims are unconstrained (*).
    array: Optional[NDArray[Shape["* num_times, * num_ROIs"], Number]] = Field(None)
class RoiResponseSeriesDataArray(Arraylike):
    # Dimension descriptors: num_times is required; num_ROIs is optional
    # (absent for the 1-D, time-only case).
    num_times: float = Field(...)
    num_ROIs: Optional[float] = Field(None)
class RoiResponseSeriesRois(DynamicTableRegion):
    """
    DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.
    """
    # NOTE(review): DynamicTable is not in this module's import list;
    # presumably resolved at update_forward_refs() time — confirm.
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Fix: nptyping's NDArray requires both a shape and a dtype; the generator
    # emitted an empty dtype slot (`..., ]`), which fails at class-creation
    # time. Use Any since the element type is unconstrained by the schema.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class PlaneSegmentationImageMask(VectorData):
    """
    ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: nptyping's NDArray requires both a shape and a dtype; the generator
    # emitted an empty dtype slot (`..., ]`), which fails at class-creation
    # time. Use Any since the element type is unconstrained by the schema.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class PlaneSegmentationPixelMaskIndex(VectorIndex):
    """
    Index into pixel_mask.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: nptyping's NDArray requires both a shape and a dtype; the generator
    # emitted an empty dtype slot (`..., ]`), which fails at class-creation
    # time. Use Any since the element type is unconstrained by the schema.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class PlaneSegmentationVoxelMaskIndex(VectorIndex):
    """
    Index into voxel_mask.
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: nptyping's NDArray requires both a shape and a dtype; the generator
    # emitted an empty dtype slot (`..., ]`), which fails at class-creation
    # time. Use Any since the element type is unconstrained by the schema.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class PlaneSegmentationReferenceImages(ConfiguredBaseModel):
    """
    Image stacks that the segmentation masks apply to.
    """
    # Field deliberately shares its name with the ImageSeries class
    # (generated container-of-type pattern).
    ImageSeries: Optional[List[ImageSeries]] = Field(default_factory=list, description="""One or more image stacks that the masks apply to (can be one-element stack).""")
class ImagingPlaneManifold(ConfiguredBaseModel):
    """
    DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
    """
    conversion: Optional[float] = Field(None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""")
    unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. The default value is 'meters'.""")
    # (height, width, 3) or (height, width, 3, depth) float32 array of positions.
    array: Optional[NDArray[Shape["* height, * width, 3 x_y_z, * depth"], Float32]] = Field(None)
class ImagingPlaneManifoldArray(Arraylike):
    # Dimension descriptors: height/width/x_y_z are required; depth is
    # optional (only present for volumetric data).
    height: float = Field(...)
    width: float = Field(...)
    x_y_z: float = Field(...)
    depth: Optional[float] = Field(None)
class ImagingPlaneOriginCoords(ConfiguredBaseModel):
    """
    Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
    """
    unit: Optional[str] = Field(None, description="""Measurement units for origin_coords. The default value is 'meters'.""")
    # Either a 2-vector (x, y) or a 3-vector (x, y, z), float32.
    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], Float32]] = Field(None)
class ImagingPlaneOriginCoordsArray(Arraylike):
    # Dimension descriptors for the 2-D (x_y) vs 3-D (x_y_z) variants.
    x_y: Optional[float] = Field(None)
    x_y_z: Optional[float] = Field(None)
class ImagingPlaneGridSpacing(ConfiguredBaseModel):
    """
    Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
    """
    unit: Optional[str] = Field(None, description="""Measurement units for grid_spacing. The default value is 'meters'.""")
    # Either a 2-vector (x, y) or a 3-vector (x, y, z), float32.
    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], Float32]] = Field(None)
class ImagingPlaneGridSpacingArray(Arraylike):
    # Dimension descriptors for the 2-D (x_y) vs 3-D (x_y_z) variants.
    x_y: Optional[float] = Field(None)
    x_y_z: Optional[float] = Field(None)
class CorrectedImageStackCorrected(ImageSeries):
    """
    Image stack with frames shifted to the common coordinates.
    """
    # NOTE(review): ImageSeries*/TimeSeries* helper types referenced below are
    # not in this module's import list; presumably resolved when
    # update_forward_refs() runs — confirm the generator emits those imports.
    data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
    dimension: Optional[ImageSeriesDimension] = Field(None, description="""Number of pixels on x, y, (and z) axes.""")
    external_file: Optional[ImageSeriesExternalFile] = Field(None, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
    format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
class CorrectedImageStackXyTranslation(TimeSeries):
    """
    Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.
    """
    # NOTE: unlike the optional TimeSeries attributes below, `data` is required
    # here because the per-frame x,y deltas are the payload of this series.
    description: Optional[str] = Field(None, description="""Description of the time series.""")
    comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
    data: TimeSeriesData = Field(..., description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""")
    starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
    timestamps: Optional[TimeSeriesTimestamps] = Field(None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
    control: Optional[TimeSeriesControl] = Field(None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
    control_description: Optional[TimeSeriesControlDescription] = Field(None, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
    sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    TwoPhotonSeriesFieldOfView,
    TwoPhotonSeriesFieldOfViewArray,
    RoiResponseSeriesData,
    RoiResponseSeriesDataArray,
    RoiResponseSeriesRois,
    PlaneSegmentationImageMask,
    PlaneSegmentationPixelMaskIndex,
    PlaneSegmentationVoxelMaskIndex,
    PlaneSegmentationReferenceImages,
    ImagingPlaneManifold,
    ImagingPlaneManifoldArray,
    ImagingPlaneOriginCoords,
    ImagingPlaneOriginCoordsArray,
    ImagingPlaneGridSpacing,
    ImagingPlaneGridSpacingArray,
    CorrectedImageStackCorrected,
    CorrectedImageStackXyTranslation,
):
    _model.update_forward_refs()

View file

@ -0,0 +1,63 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .core_nwb_retinotopy_include import (
ImagingRetinotopyAxisDescriptions,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyVasculatureImage,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopySignMap
)
from .core_nwb_base import (
NWBDataInterface
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim adding a ``__weakref__`` slot so model instances can be weakly referenced."""
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """
    Shared base for all generated models: validate on assignment and on every
    field, treat underscore attributes as private, forbid unknown fields,
    allow arbitrary (non-pydantic) field types, and store enum members by value.
    """
    pass
class ImagingRetinotopy(NWBDataInterface):
    """
    Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x).
    """
    # Required: both phase maps, the axis descriptions, and the vasculature image.
    # Optional: power maps, focal-depth image, and sign map.
    axis_1_phase_map: ImagingRetinotopyAxis1PhaseMap = Field(..., description="""Phase response to stimulus on the first measured axis.""")
    axis_1_power_map: Optional[ImagingRetinotopyAxis1PowerMap] = Field(None, description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
    axis_2_phase_map: ImagingRetinotopyAxis2PhaseMap = Field(..., description="""Phase response to stimulus on the second measured axis.""")
    axis_2_power_map: Optional[ImagingRetinotopyAxis2PowerMap] = Field(None, description="""Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""")
    axis_descriptions: ImagingRetinotopyAxisDescriptions = Field(..., description="""Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].""")
    focal_depth_image: Optional[ImagingRetinotopyFocalDepthImage] = Field(None, description="""Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].""")
    sign_map: Optional[ImagingRetinotopySignMap] = Field(None, description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""")
    vasculature_image: ImagingRetinotopyVasculatureImage = Field(..., description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on the single model in this module.
ImagingRetinotopy.update_forward_refs()

View file

@ -0,0 +1,173 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .nwb_language import (
Arraylike
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim adding a ``__weakref__`` slot so model instances can be weakly referenced."""
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """
    Shared base for all generated models: validate on assignment and on every
    field, treat underscore attributes as private, forbid unknown fields,
    allow arbitrary (non-pydantic) field types, and store enum members by value.
    """
    pass
class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
    """
    Phase response to stimulus on the first measured axis.
    """
    # Scalar attributes un-nested onto the model; map values live in `array`.
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    # 2-D float32, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
class ImagingRetinotopyAxis1PhaseMapArray(Arraylike):
    # Dimension slots for the 2-D map (rows, columns); see Arraylike.
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
    """
    Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
    """
    # Scalar attributes un-nested onto the model; map values live in `array`.
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    # 2-D float32, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
class ImagingRetinotopyAxis1PowerMapArray(Arraylike):
    # Dimension slots for the 2-D map (rows, columns); see Arraylike.
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
    """
    Phase response to stimulus on the second measured axis.
    """
    # Scalar attributes un-nested onto the model; map values live in `array`.
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    # 2-D float32, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
class ImagingRetinotopyAxis2PhaseMapArray(Arraylike):
    # Dimension slots for the 2-D map (rows, columns); see Arraylike.
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
    """
    Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
    """
    # Scalar attributes un-nested onto the model; map values live in `array`.
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
    # 2-D float32, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
class ImagingRetinotopyAxis2PowerMapArray(Arraylike):
    # Dimension slots for the 2-D map (rows, columns); see Arraylike.
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)
class ImagingRetinotopyAxisDescriptions(ConfiguredBaseModel):
    """
    Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].
    """
    # 1-D vector un-nested into a plain list; per the description it is
    # expected to hold exactly two labels (not enforced here).
    axis_descriptions: List[str] = Field(default_factory=list, description="""Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].""")
class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
    """
    Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].
    """
    # Scalar attributes un-nested onto the model; pixel data lives in `array`.
    bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""")
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
    format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
    # 2-D uint16, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], UInt16]] = Field(None)
class ImagingRetinotopyFocalDepthImageArray(Arraylike):
    # Dimension slots for the 2-D image (rows, columns); see Arraylike.
    num_rows: Optional[int] = Field(None)
    num_cols: Optional[int] = Field(None)
class ImagingRetinotopySignMap(ConfiguredBaseModel):
    """
    Sine of the angle between the direction of the gradient in axis_1 and axis_2.
    """
    # Scalar attributes un-nested onto the model; map values live in `array`.
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    # 2-D float32, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
class ImagingRetinotopySignMapArray(Arraylike):
    # Dimension slots for the 2-D map (rows, columns); see Arraylike.
    num_rows: Optional[float] = Field(None)
    num_cols: Optional[float] = Field(None)
class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
    """
    Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]
    """
    # Scalar attributes un-nested onto the model; pixel data lives in `array`.
    bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""")
    dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
    format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
    # 2-D uint16, (num_rows, num_cols) per the NDArray annotation.
    array: Optional[NDArray[Shape["* num_rows, * num_cols"], UInt16]] = Field(None)
class ImagingRetinotopyVasculatureImageArray(Arraylike):
    # Dimension slots for the 2-D image (rows, columns); see Arraylike.
    num_rows: Optional[int] = Field(None)
    num_cols: Optional[int] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    ImagingRetinotopyAxis1PhaseMap,
    ImagingRetinotopyAxis1PhaseMapArray,
    ImagingRetinotopyAxis1PowerMap,
    ImagingRetinotopyAxis1PowerMapArray,
    ImagingRetinotopyAxis2PhaseMap,
    ImagingRetinotopyAxis2PhaseMapArray,
    ImagingRetinotopyAxis2PowerMap,
    ImagingRetinotopyAxis2PowerMapArray,
    ImagingRetinotopyAxisDescriptions,
    ImagingRetinotopyFocalDepthImage,
    ImagingRetinotopyFocalDepthImageArray,
    ImagingRetinotopySignMap,
    ImagingRetinotopySignMapArray,
    ImagingRetinotopyVasculatureImage,
    ImagingRetinotopyVasculatureImageArray,
):
    _model.update_forward_refs()

View file

@ -1,272 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "1.8.0"
class WeakRefShimBaseModel(BaseModel):
    """Shim adding a ``__weakref__`` slot so model instances can be weakly referenced."""
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """
    Shared base for all generated models: validate on assignment and on every
    field, treat underscore attributes as private, forbid unknown fields,
    allow arbitrary (non-pydantic) field types, and store enum members by value.
    """
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtype names usable in the schema language; values equal their names."""
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class CSRMatrixIndices(ConfiguredBaseModel):
    """
    The column indices.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixIndicesArray.
    array: Optional[CSRMatrixIndicesArray] = Field(None)
class CSRMatrixIndptr(ConfiguredBaseModel):
    """
    The row index pointer.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixIndptrArray.
    array: Optional[CSRMatrixIndptrArray] = Field(None)
class CSRMatrixData(ConfiguredBaseModel):
    """
    The non-zero values in the matrix.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixDataArray.
    array: Optional[CSRMatrixDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Intentionally empty: subclasses declare one field per possible dimension.
    None
class CSRMatrixIndicesArray(Arraylike):
    # Single required dimension: one entry per stored non-zero value.
    number_of_non_zero_values: int = Field(...)
class CSRMatrixIndptrArray(Arraylike):
    """
    Dimensions of the CSRMatrix ``indptr`` dataset.

    BUG FIX: the generated slot name ``number_of_rows_in_the_matrix_+_1``
    contains a literal ``+`` and is not a valid Python identifier, making the
    whole module a SyntaxError.  The field is renamed to a valid identifier and
    the original schema dimension label is preserved as a pydantic alias so
    data keyed by the schema name still round-trips.
    """
    number_of_rows_in_the_matrix_plus_1: int = Field(..., alias="number_of_rows_in_the_matrix_+_1")
class CSRMatrixDataArray(Arraylike):
    # One value per stored non-zero; dtype left unconstrained (Any) by the schema.
    number_of_non_zero_values: Any = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Marker base class; declares no fields of its own.
    None
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Marker base class; declares no fields of its own.
    None
class CSRMatrix(Container):
    """
    A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
    """
    # `shape` is scalar metadata un-nested onto the class; the three CSR
    # component datasets below are required sub-objects.
    shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
    indices: CSRMatrixIndices = Field(..., description="""The column indices.""")
    indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""")
    data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Field names mirror the contained neurodata types (hence non-snake-case).
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Payload wrapper; dimensions are declared by VectorDataArray.
    array: Optional[VectorDataArray] = Field(None)
class VectorDataArray(Arraylike):
    # First dimension is required; up to three optional trailing dimensions.
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    # Payload wrapper; dimensions are declared by VectorIndexArray.
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndexArray(Arraylike):
    # Single required dimension: one index entry per table row.
    num_rows: int = Field(...)
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    # Payload wrapper; dimensions are declared by ElementIdentifiersArray.
    array: Optional[ElementIdentifiersArray] = Field(None)
class ElementIdentifiersArray(Arraylike):
    # Single required dimension: one identifier per element.
    num_elements: int = Field(...)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    # Payload wrapper; dimensions are declared by DynamicTableRegionArray.
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableRegionArray(Arraylike):
    # Single required dimension: one row reference per entry.
    num_rows: int = Field(...)
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # Field name mirrors the contained neurodata type (hence non-snake-case).
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    # Payload wrapper; dimensions are declared by DynamicTableIdArray.
    array: Optional[DynamicTableIdArray] = Field(None)
class DynamicTableIdArray(Arraylike):
    # Single required dimension: one id per table row.
    num_rows: int = Field(...)
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # Field names mirroring contained neurodata types are non-snake-case.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    CSRMatrixIndices,
    CSRMatrixIndptr,
    CSRMatrixData,
    Arraylike,
    CSRMatrixIndicesArray,
    CSRMatrixIndptrArray,
    CSRMatrixDataArray,
    Data,
    Container,
    CSRMatrix,
    SimpleMultiContainer,
    VectorData,
    VectorDataArray,
    VectorIndex,
    VectorIndexArray,
    ElementIdentifiers,
    ElementIdentifiersArray,
    DynamicTableRegion,
    DynamicTableRegionArray,
    DynamicTable,
    DynamicTableId,
    DynamicTableIdArray,
    AlignedDynamicTable,
):
    _model.update_forward_refs()

View file

@ -1,170 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim adding a ``__weakref__`` slot so model instances can be weakly referenced."""
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """
    Shared base for all generated models: validate on assignment and on every
    field, treat underscore attributes as private, forbid unknown fields,
    allow arbitrary (non-pydantic) field types, and store enum members by value.
    """
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtype names usable in the schema language; values equal their names."""
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class CSRMatrixIndices(ConfiguredBaseModel):
    """
    The column indices.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixIndicesArray.
    array: Optional[CSRMatrixIndicesArray] = Field(None)
class CSRMatrixIndptr(ConfiguredBaseModel):
    """
    The row index pointer.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixIndptrArray.
    array: Optional[CSRMatrixIndptrArray] = Field(None)
class CSRMatrixData(ConfiguredBaseModel):
    """
    The non-zero values in the matrix.
    """
    # Payload wrapper; dimensions are declared by CSRMatrixDataArray.
    array: Optional[CSRMatrixDataArray] = Field(None)
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Intentionally empty: subclasses declare one field per possible dimension.
    None
class CSRMatrixIndicesArray(Arraylike):
    # Single required dimension: one entry per stored non-zero value.
    number_of_non_zero_values: int = Field(...)
class CSRMatrixIndptrArray(Arraylike):
    """
    Dimensions of the CSRMatrix ``indptr`` dataset.

    BUG FIX: the generated slot name ``number_of_rows_in_the_matrix_+_1``
    contains a literal ``+`` and is not a valid Python identifier, making the
    whole module a SyntaxError.  The field is renamed to a valid identifier and
    the original schema dimension label is preserved as a pydantic alias so
    data keyed by the schema name still round-trips.
    """
    number_of_rows_in_the_matrix_plus_1: int = Field(..., alias="number_of_rows_in_the_matrix_+_1")
class CSRMatrixDataArray(Arraylike):
    # One value per stored non-zero; dtype left unconstrained (Any) by the schema.
    number_of_non_zero_values: Any = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    # Marker base class; declares no fields of its own.
    None
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    # Marker base class; declares no fields of its own.
    None
class CSRMatrix(Container):
    """
    A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
    """
    # `shape` is scalar metadata un-nested onto the class; the three CSR
    # component datasets below are required sub-objects.
    shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
    indices: CSRMatrixIndices = Field(..., description="""The column indices.""")
    indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""")
    data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # Field names mirror the contained neurodata types (hence non-snake-case).
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve the postponed (string) annotations on every model in this module,
# in definition order.
for _model in (
    CSRMatrixIndices,
    CSRMatrixIndptr,
    CSRMatrixData,
    Arraylike,
    CSRMatrixIndicesArray,
    CSRMatrixIndptrArray,
    CSRMatrixDataArray,
    Data,
    Container,
    CSRMatrix,
    SimpleMultiContainer,
):
    _model.update_forward_refs()

View file

@ -1,219 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
    """Shim adding a ``__weakref__`` slot so model instances can be weakly referenced."""
    __slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    """
    Shared base for all generated models: validate on assignment and on every
    field, treat underscore attributes as private, forbid unknown fields,
    allow arbitrary (non-pydantic) field types, and store enum members by value.
    """
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtype names usable in the schema language; values equal their names."""
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
    """
    # Intentionally empty: subclasses declare one field per possible dimension.
    None
class VectorDataArray(Arraylike):
    # First dimension is required; up to three optional trailing dimensions.
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
    # Single required dimension: one index entry per table row.
    num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -1,229 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class VectorDataArray(Arraylike):
dim0: Any = Field(...)
dim1: Optional[Any] = Field(None)
dim2: Optional[Any] = Field(None)
dim3: Optional[Any] = Field(None)
class VectorIndexArray(Arraylike):
num_rows: int = Field(...)
class ElementIdentifiersArray(Arraylike):
num_elements: int = Field(...)
class DynamicTableRegionArray(Arraylike):
num_rows: int = Field(...)
class DynamicTableIdArray(Arraylike):
num_rows: int = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class VectorData(Data):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[VectorDataArray] = Field(None)
class VectorIndex(VectorData):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
array: Optional[VectorIndexArray] = Field(None)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
array: Optional[ElementIdentifiersArray] = Field(None)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableId(ElementIdentifiers):
"""
Array of unique identifiers for the rows of this dynamic table.
"""
array: Optional[DynamicTableIdArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class DynamicTable(Container):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
Data.update_forward_refs()
VectorData.update_forward_refs()
EnumData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableId.update_forward_refs()
Container.update_forward_refs()
DynamicTable.update_forward_refs()
AlignedDynamicTable.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -1,326 +0,0 @@
# Auto-generated pydantic models for the HDMF-common schema (v0.5.0).
from __future__ import annotations  # lazy annotations: lets classes reference each other before definition
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
# typing.Literal exists from Python 3.8; fall back to typing_extensions below that.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal
metamodel_version = "None"  # metamodel version string emitted by the generator ("None" = unset)
version = "0.5.0"  # version of the source schema this module was generated from
class WeakRefShimBaseModel(BaseModel):
    """Shim that adds ``__weakref__`` to ``__slots__`` so generated model
    instances can be weakly referenced (the plain pydantic base omits it)."""
    __slots__ = '__weakref__'
# Common base for every generated model. The keyword arguments in the class
# header are pydantic v1 model-config options (strict validation, no extra
# fields, enum values stored as their value, etc.).
class ConfiguredBaseModel(WeakRefShimBaseModel,
                validate_assignment = True,
                validate_all = True,
                underscore_attrs_are_private = True,
                extra = 'forbid',
                arbitrary_types_allowed = True,
                use_enum_values = True):
    pass
class FlatDType(str, Enum):
    """Flat (scalar) dtypes allowed by the source schema. Member names/values
    mirror the schema's dtype vocabulary, so several shadow Python builtins
    (``float``, ``int``, ``bool``) by design."""
    float = "float"
    float32 = "float32"
    double = "double"
    float64 = "float64"
    long = "long"
    int64 = "int64"
    int = "int"
    int32 = "int32"
    int16 = "int16"
    short = "short"
    int8 = "int8"
    uint = "uint"
    uint32 = "uint32"
    uint16 = "uint16"
    uint8 = "uint8"
    uint64 = "uint64"
    numeric = "numeric"
    text = "text"
    utf = "utf"
    utf8 = "utf8"
    utf_8 = "utf_8"
    ascii = "ascii"
    bool = "bool"
    isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
    """
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
    """
    None  # deliberately empty body: subclasses declare the dimension fields
# Shape specs for the HERD tables: each table is one-dimensional with a single
# required ``num_rows`` dimension; ``Any`` because the row dtype is compound
# and not constrained here.
class HERDKeysArray(Arraylike):
    num_rows: Any = Field(...)
class HERDFilesArray(Arraylike):
    num_rows: Any = Field(...)
class HERDEntitiesArray(Arraylike):
    num_rows: Any = Field(...)
class HERDObjectsArray(Arraylike):
    num_rows: Any = Field(...)
class HERDObjectKeysArray(Arraylike):
    num_rows: Any = Field(...)
class HERDEntityKeysArray(Arraylike):
    num_rows: Any = Field(...)
class Data(ConfiguredBaseModel):
    """
    An abstract data type for a dataset.
    """
    None  # abstract marker: no fields of its own
# The six HERD tables. Each wraps its row data in an optional `array` slot
# whose shape is pinned by the matching *Array spec above.
class HERDKeys(Data):
    """
    A table for storing user terms that are used to refer to external resources.
    """
    array: Optional[HERDKeysArray] = Field(None)
class HERDFiles(Data):
    """
    A table for storing object ids of files used in external resources.
    """
    array: Optional[HERDFilesArray] = Field(None)
class HERDEntities(Data):
    """
    A table for mapping user terms (i.e., keys) to resource entities.
    """
    array: Optional[HERDEntitiesArray] = Field(None)
class HERDObjects(Data):
    """
    A table for identifying which objects in a file contain references to external resources.
    """
    array: Optional[HERDObjectsArray] = Field(None)
class HERDObjectKeys(Data):
    """
    A table for identifying which objects use which keys.
    """
    array: Optional[HERDObjectKeysArray] = Field(None)
class HERDEntityKeys(Data):
    """
    A table for identifying which keys use which entity.
    """
    array: Optional[HERDEntityKeysArray] = Field(None)
class Container(ConfiguredBaseModel):
    """
    An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
    """
    None  # abstract marker: no fields of its own
class HERD(Container):
    """
    HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files.
    """
    # BUG FIX: the generator had emitted the *repr* of ``dict.keys``
    # (``<built-in method keys of dict object at 0x...>``) as this field's
    # default -- the slot name ``keys`` collided with the dict method during
    # code generation, producing invalid Python. All six tables are required,
    # so the correct default sentinel is ``...``, matching the sibling fields.
    keys: HERDKeys = Field(..., description="""A table for storing user terms that are used to refer to external resources.""")
    files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""")
    entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""")
    objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""")
    object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""")
    entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""")
class SimpleMultiContainer(Container):
    """
    A simple Container for holding onto multiple containers.
    """
    # NOTE: field names intentionally shadow the Data/Container class names;
    # the List[...] forward refs still resolve to the module-level classes.
    Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
    Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # VectorDataArray is defined later in this module; resolved via forward refs.
    array: Optional[VectorDataArray] = Field(None)
class EnumData(VectorData):
    """
    Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
    """
    elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""")
    # description/array re-declared from VectorData by the generator (same types/defaults).
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    array: Optional[VectorDataArray] = Field(None)
class VectorDataArray(Arraylike):
    """Shape spec for VectorData: 1-4 dimensions, only the first required."""
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    array: Optional[VectorIndexArray] = Field(None)
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
class VectorIndexArray(Arraylike):
    """Shape spec for VectorIndex: 1-D, one int per table row."""
    num_rows: int = Field(...)
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    array: Optional[ElementIdentifiersArray] = Field(None)
class ElementIdentifiersArray(Arraylike):
    """Shape spec for ElementIdentifiers: 1-D, one int per element."""
    num_elements: int = Field(...)
class DynamicTableRegion(VectorData):
    """
    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
    """
    table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
    description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
    array: Optional[DynamicTableRegionArray] = Field(None)
class DynamicTableRegionArray(Arraylike):
    """Shape spec for DynamicTableRegion: 1-D, one row index per row."""
    num_rows: int = Field(...)
class DynamicTable(Container):
    """
    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
    """
    # NOTE(review): the description says "names" (plural) but the generated
    # type is a single Optional[str] -- looks like a flattening artifact of the
    # generator; confirm whether this should be a list of strings.
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    # Field name shadows the VectorData class; the List[VectorData] forward ref
    # still resolves to the module-level class.
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
class DynamicTableId(ElementIdentifiers):
    """
    Array of unique identifiers for the rows of this dynamic table.
    """
    array: Optional[DynamicTableIdArray] = Field(None)
class DynamicTableIdArray(Arraylike):
    """Shape spec for DynamicTableId: 1-D, one id per table row."""
    num_rows: int = Field(...)
class AlignedDynamicTable(DynamicTable):
    """
    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
    """
    # NOTE(review): like ``colnames``, ``categories`` is Optional[str] although
    # its description implies a list of names -- likely a generator flattening
    # artifact; confirm.
    categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
    # Field name shadows the DynamicTable class; forward ref resolves to the class.
    DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
    # colnames/description/id/VectorData re-declared from DynamicTable by the
    # generator (same types and defaults).
    colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
    description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
    id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
    VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
# Resolve postponed (string) annotations now that every class is defined --
# required by pydantic v1 with `from __future__ import annotations`.
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
HERDKeysArray.update_forward_refs()
HERDFilesArray.update_forward_refs()
HERDEntitiesArray.update_forward_refs()
HERDObjectsArray.update_forward_refs()
HERDObjectKeysArray.update_forward_refs()
HERDEntityKeysArray.update_forward_refs()
Data.update_forward_refs()
HERDKeys.update_forward_refs()
HERDFiles.update_forward_refs()
HERDEntities.update_forward_refs()
HERDObjects.update_forward_refs()
HERDObjectKeys.update_forward_refs()
HERDEntityKeys.update_forward_refs()
Container.update_forward_refs()
HERD.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
VectorData.update_forward_refs()
EnumData.update_forward_refs()
VectorDataArray.update_forward_refs()
VectorIndex.update_forward_refs()
VectorIndexArray.update_forward_refs()
ElementIdentifiers.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
DynamicTableRegion.update_forward_refs()
DynamicTableRegionArray.update_forward_refs()
DynamicTable.update_forward_refs()
DynamicTableId.update_forward_refs()
DynamicTableIdArray.update_forward_refs()
AlignedDynamicTable.update_forward_refs()

View file

@ -1,214 +0,0 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
class HERDKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDFilesArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntitiesArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectsArray(Arraylike):
num_rows: Any = Field(...)
class HERDObjectKeysArray(Arraylike):
num_rows: Any = Field(...)
class HERDEntityKeysArray(Arraylike):
num_rows: Any = Field(...)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
None
class HERDKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
array: Optional[HERDKeysArray] = Field(None)
class HERDFiles(Data):
"""
A table for storing object ids of files used in external resources.
"""
array: Optional[HERDFilesArray] = Field(None)
class HERDEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
array: Optional[HERDEntitiesArray] = Field(None)
class HERDObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
array: Optional[HERDObjectsArray] = Field(None)
class HERDObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
array: Optional[HERDObjectKeysArray] = Field(None)
class HERDEntityKeys(Data):
"""
A table for identifying which keys use which entity.
"""
array: Optional[HERDEntityKeysArray] = Field(None)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
None
class HERD(Container):
    """
    HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files.
    """

    # Fix: the generator leaked the repr of the built-in ``dict.keys`` method
    # ("<built-in method keys of dict object at 0x...>") into this field's
    # default, which is not valid Python at all. ``keys`` is a required table,
    # so use Ellipsis like the other five fields.
    keys: HERDKeys = Field(..., description="""A table for storing user terms that are used to refer to external resources.""")
    files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""")
    entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""")
    objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""")
    object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""")
    entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""")
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
Data: Optional[List[Data]] = Field(default_factory=list, description="""Data objects held within this SimpleMultiContainer.""")
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Arraylike.update_forward_refs()
HERDKeysArray.update_forward_refs()
HERDFilesArray.update_forward_refs()
HERDEntitiesArray.update_forward_refs()
HERDObjectsArray.update_forward_refs()
HERDObjectKeysArray.update_forward_refs()
HERDEntityKeysArray.update_forward_refs()
Data.update_forward_refs()
HERDKeys.update_forward_refs()
HERDFiles.update_forward_refs()
HERDEntities.update_forward_refs()
HERDObjects.update_forward_refs()
HERDObjectKeys.update_forward_refs()
HERDEntityKeys.update_forward_refs()
Container.update_forward_refs()
HERD.update_forward_refs()
SimpleMultiContainer.update_forward_refs()

View file

@ -0,0 +1,33 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "1.8.0"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/

View file

@ -3,6 +3,7 @@ from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -10,6 +11,7 @@ else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
@ -26,59 +28,6 @@ class ConfiguredBaseModel(WeakRefShimBaseModel,
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
@ -101,17 +50,9 @@ class SimpleMultiContainer(Container):
Container: Optional[List[Container]] = Field(default_factory=list, description="""Container objects held within this SimpleMultiContainer.""")
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
None
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
Data.update_forward_refs()
Container.update_forward_refs()
SimpleMultiContainer.update_forward_refs()
Arraylike.update_forward_refs()

View file

@ -0,0 +1,33 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/

View file

@ -0,0 +1,54 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_sparse_include import (
CSRMatrixData,
CSRMatrixIndices,
CSRMatrixIndptr
)
from .hdmf_common_base import (
Container
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class CSRMatrix(Container):
    """
    A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
    """
    # NOTE(review): described as "(number of rows, number of columns)" but typed
    # as a single int — presumably this should be a 2-element sequence; confirm
    # how the generator maps the hdmf-common CSRMatrix 'shape' attribute.
    shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
    # The three CSR component datasets, un-nested into their own classes.
    indices: CSRMatrixIndices = Field(..., description="""The column indices.""")
    indptr: CSRMatrixIndptr = Field(..., description="""The row index pointer.""")
    data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
CSRMatrix.update_forward_refs()

View file

@ -0,0 +1,57 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class CSRMatrixIndices(ConfiguredBaseModel):
    """
    The column indices.
    """
    # Un-nested 1-D vector data: the column index of each stored value.
    indices: List[int] = Field(default_factory=list, description="""The column indices.""")
class CSRMatrixIndptr(ConfiguredBaseModel):
    """
    The row index pointer.
    """
    # Un-nested 1-D vector data: row i's values live at data[indptr[i]:indptr[i+1]].
    indptr: List[int] = Field(default_factory=list, description="""The row index pointer.""")
class CSRMatrixData(ConfiguredBaseModel):
    """
    The non-zero values in the matrix.
    """
    # Un-nested 1-D vector data; element type is left as Any by the generator.
    data: List[Any] = Field(default_factory=list, description="""The non-zero values in the matrix.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
CSRMatrixIndices.update_forward_refs()
CSRMatrixIndptr.update_forward_refs()
CSRMatrixData.update_forward_refs()

View file

@ -0,0 +1,105 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# from .hdmf_common_table_include import (
# VectorDataArray,
# ElementIdentifiersArray,
# DynamicTableId
# )
from .hdmf_common_base import (
Container,
Data
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class VectorData(Data):
    """
    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
    """
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Array payload: between 1 and 4 dimensions of any extent ("* dimN"),
    # dtype Any since a column may hold arbitrary scalar types.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class VectorIndex(VectorData):
    """
    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
    """
    # The VectorData column this index maps DynamicTable rows into.
    target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Inherited arraylike payload: 1-4 optional dimensions, any dtype.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
class ElementIdentifiers(Data):
    """
    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
    """
    # 1-D integer array of ids; "* num_elements" = any length.
    array: Optional[NDArray[Shape["* num_elements"], Int]] = Field(None)
# class DynamicTableRegion(VectorData):
# """
# DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
# """
# table: Optional[DynamicTable] = Field(None, description="""Reference to the DynamicTable object that this region applies to.""")
# description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
# array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
#
# class DynamicTable(Container):
# """
# A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
# """
# colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
# description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
# id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
# VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
#
# class AlignedDynamicTable(DynamicTable):
# """
# DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
# """
# categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
# DynamicTable: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
# colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
# description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
# id: DynamicTableId = Field(..., description="""Array of unique identifiers for the rows of this dynamic table.""")
# VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
#
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed (string) annotations now that all classes are defined.
VectorData.update_forward_refs()
VectorIndex.update_forward_refs()
ElementIdentifiers.update_forward_refs()
# Fix: DynamicTableRegion is commented out above, so calling its
# update_forward_refs() here would raise NameError at import time.
# Re-enable alongside the class when the circular-import issue is resolved.
# DynamicTableRegion.update_forward_refs()
# DynamicTable.update_forward_refs()
# AlignedDynamicTable.update_forward_refs()

View file

@ -0,0 +1,65 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# from .hdmf_common_table import (
# ElementIdentifiers
# )
from .nwb_language import (
Arraylike
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class VectorDataArray(Arraylike):
    # Possible dimensions of a VectorData column. Only dim0 (the row dimension)
    # is required; dim1-dim3 are optional higher dimensions, per the Arraylike
    # convention (only dims present in every specifier are required).
    dim0: Any = Field(...)
    dim1: Optional[Any] = Field(None)
    dim2: Optional[Any] = Field(None)
    dim3: Optional[Any] = Field(None)
class ElementIdentifiersArray(Arraylike):
    # Single required dimension: the number of identifiers.
    num_elements: int = Field(...)
# class DynamicTableId(ElementIdentifiers):
# """
# Array of unique identifiers for the rows of this dynamic table.
# """
# id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
# array: Optional[NDArray[Shape["* num_elements"], Int]] = Field(None)
#
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
# Resolve postponed (string) annotations now that all classes are defined.
VectorDataArray.update_forward_refs()
ElementIdentifiersArray.update_forward_refs()
# Fix: DynamicTableId is commented out above, so this call would raise
# NameError at import time. Re-enable together with the class.
# DynamicTableId.update_forward_refs()

View file

@ -0,0 +1,33 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "0.5.0"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/

View file

@ -0,0 +1,47 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_table import (
VectorData
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class EnumData(VectorData):
    """
    Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
    """
    # The VectorData holding the set of enumerable values this column indexes into.
    elements: Optional[VectorData] = Field(None, description="""Reference to the VectorData object that contains the enumerable elements""")
    description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
    # Fix: the generated annotation was missing the dtype argument
    # (``NDArray[Shape[...], ]``), which is not a valid two-argument nptyping
    # NDArray form; use ``Any`` to match the other VectorData subclasses.
    array: Optional[NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any]] = Field(None)
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
EnumData.update_forward_refs()

View file

@ -0,0 +1,33 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/

View file

@ -0,0 +1,59 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_base import (
Container
)
from .hdmf_experimental_resources_include import (
HERDObjectKeys,
HERDObjects,
HERDEntities,
HERDKeys,
HERDFiles,
HERDEntityKeys
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class HERD(Container):
    """
    HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files.
    """

    # Fix: the generator leaked the repr of the built-in ``dict.keys`` method
    # ("<built-in method keys of dict object at 0x...>") into this field's
    # default, which is not valid Python at all. ``keys`` is a required table,
    # so use Ellipsis like the other five fields.
    keys: HERDKeys = Field(..., description="""A table for storing user terms that are used to refer to external resources.""")
    files: HERDFiles = Field(..., description="""A table for storing object ids of files used in external resources.""")
    entities: HERDEntities = Field(..., description="""A table for mapping user terms (i.e., keys) to resource entities.""")
    objects: HERDObjects = Field(..., description="""A table for identifying which objects in a file contain references to external resources.""")
    object_keys: HERDObjectKeys = Field(..., description="""A table for identifying which objects use which keys.""")
    entity_keys: HERDEntityKeys = Field(..., description="""A table for identifying which keys use which entity.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
HERD.update_forward_refs()

View file

@ -0,0 +1,85 @@
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
from .hdmf_common_base import (
Data
)
metamodel_version = "None"
version = "None"
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = 'forbid',
arbitrary_types_allowed = True,
use_enum_values = True):
pass
class HERDKeys(Data):
    """
    A table for storing user terms that are used to refer to external resources.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    keys: List[Any] = Field(default_factory=list, description="""A table for storing user terms that are used to refer to external resources.""")
class HERDFiles(Data):
    """
    A table for storing object ids of files used in external resources.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    files: List[Any] = Field(default_factory=list, description="""A table for storing object ids of files used in external resources.""")
class HERDEntities(Data):
    """
    A table for mapping user terms (i.e., keys) to resource entities.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    entities: List[Any] = Field(default_factory=list, description="""A table for mapping user terms (i.e., keys) to resource entities.""")
class HERDObjects(Data):
    """
    A table for identifying which objects in a file contain references to external resources.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    objects: List[Any] = Field(default_factory=list, description="""A table for identifying which objects in a file contain references to external resources.""")
class HERDObjectKeys(Data):
    """
    A table for identifying which objects use which keys.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    object_keys: List[Any] = Field(default_factory=list, description="""A table for identifying which objects use which keys.""")
class HERDEntityKeys(Data):
    """
    A table for identifying which keys use which entity.
    """
    # Flattened 1-D row data; element type is left as Any by the generator.
    entity_keys: List[Any] = Field(default_factory=list, description="""A table for identifying which keys use which entity.""")
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
HERDKeys.update_forward_refs()
HERDFiles.update_forward_refs()
HERDEntities.update_forward_refs()
HERDObjects.update_forward_refs()
HERDObjectKeys.update_forward_refs()
HERDEntityKeys.update_forward_refs()

View file

@ -3,6 +3,7 @@ from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel as BaseModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -10,6 +11,7 @@ else:
from typing_extensions import Literal
metamodel_version = "None"
version = "None"
@ -26,59 +28,6 @@ class ConfiguredBaseModel(WeakRefShimBaseModel,
pass
class FlatDType(str, Enum):
float = "float"
float32 = "float32"
double = "double"
float64 = "float64"
long = "long"
int64 = "int64"
int = "int"
int32 = "int32"
int16 = "int16"
short = "short"
int8 = "int8"
uint = "uint"
uint32 = "uint32"
uint16 = "uint16"
uint8 = "uint8"
uint64 = "uint64"
numeric = "numeric"
text = "text"
utf = "utf"
utf8 = "utf8"
utf_8 = "utf_8"
ascii = "ascii"
bool = "bool"
isodatetime = "isodatetime"
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.

View file

@ -0,0 +1,192 @@
name: core.nwb.base.include
id: core.nwb.base.include
imports:
- hdmf-common.base
- hdmf-common.table
- nwb.language
- core.nwb.base.include
- core.nwb.base
default_prefix: core.nwb.base.include/
classes:
Image__Array:
name: Image__Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
ImageReferences__Array:
name: ImageReferences__Array
is_a: Arraylike
attributes:
num_images:
name: num_images
range: Image
required: true
TimeSeries__data:
name: TimeSeries__data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension
should always represent time. This can also be used to store binary data (e.g.,
image frames). This can also be a link to data stored in an external file.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as signed 16-bit
integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
range: float32
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
its coercion to the specified 'unit'. Two common examples of this include
(a) data stored in an unsigned type that requires a shift after scaling
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
"instantaneous", or "step". For example, a voltage trace would be "continuous",
because samples are recorded from a continuous process. An array of lick
times would be "instantaneous", because the data represents distinct moments
in time. Times of image presentations would be "step" because the picture
remains the same until the next timepoint. This field is optional, but is
useful in providing information about the underlying data. It may inform
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
array:
name: array
range: TimeSeries__data__Array
TimeSeries__data__Array:
name: TimeSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: AnyType
required: true
num_DIM2:
name: num_DIM2
range: AnyType
required: false
num_DIM3:
name: num_DIM3
range: AnyType
required: false
num_DIM4:
name: num_DIM4
range: AnyType
required: false
TimeSeries__starting_time:
name: TimeSeries__starting_time
description: Timestamp of the first sample in seconds. When timestamps are uniformly
spaced, the timestamp of the first sample can be specified and all subsequent
ones calculated from the sampling rate attribute.
attributes:
rate:
name: rate
description: Sampling rate, in Hz.
range: float32
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
range: text
TimeSeries__timestamps:
name: TimeSeries__timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
the common experiment master-clock stored in NWBFile.timestamps_reference_time.
multivalued: true
range: float64
required: false
TimeSeries__control:
name: TimeSeries__control
description: Numerical labels that apply to each time point in data for the purpose
of querying and slicing data by these values. If present, the length of this
array should be the same size as the first dimension of data.
attributes:
control:
name: control
description: Numerical labels that apply to each time point in data for the
purpose of querying and slicing data by these values. If present, the length
of this array should be the same size as the first dimension of data.
multivalued: true
range: uint8
required: false
TimeSeries__control_description:
name: TimeSeries__control_description
description: Description of each control value. Must be present if control is
present. If present, control_description[0] should describe time points where
control == 0.
attributes:
control_description:
name: control_description
description: Description of each control value. Must be present if control
is present. If present, control_description[0] should describe time points
where control == 0.
multivalued: true
range: text
required: false
TimeSeries__sync:
name: TimeSeries__sync
description: Lab-specific time and sync information as provided directly from
hardware devices and that is necessary for aligning all acquired time information
to a common timebase. The timestamp array stores time in the common timebase.
This group will usually only be populated in TimeSeries that are stored external
to the NWB file, in files storing raw data. Once timestamp data is calculated,
the contents of 'sync' are mostly for archival purposes.
Images__order_of_images:
name: Images__order_of_images
description: Ordered dataset of references to Image objects stored in the parent
group. Each Image object in the Images group should be stored once and only
once, so the dataset should have the same length as the number of images.
is_a: ImageReferences

View file

@ -4,6 +4,8 @@ imports:
- hdmf-common.base
- hdmf-common.table
- nwb.language
- core.nwb.base.include
- core.nwb.base
default_prefix: core.nwb.base/
classes:
NWBData:
@ -33,31 +35,7 @@ classes:
range: text
array:
name: array
range: Image_Array
Image_Array:
name: Image_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
range: Image__Array
ImageReferences:
name: ImageReferences
description: Ordered dataset of references to Image objects.
@ -65,15 +43,7 @@ classes:
attributes:
array:
name: array
range: ImageReferences_Array
ImageReferences_Array:
name: ImageReferences_Array
is_a: Arraylike
attributes:
num_images:
name: num_images
range: Image
required: true
range: ImageReferences__Array
NWBContainer:
name: NWBContainer
description: An abstract data type for a generic container storing collections
@ -106,7 +76,7 @@ classes:
data (e.g., image frames). This can also be a link to data stored in an
external file.
multivalued: false
range: TimeSeries_data
range: TimeSeries__data
required: true
starting_time:
name: starting_time
@ -114,14 +84,14 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
multivalued: false
range: TimeSeries_starting_time
range: TimeSeries__starting_time
required: false
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
the common experiment master-clock stored in NWBFile.timestamps_reference_time.
multivalued: false
range: TimeSeries_timestamps
range: TimeSeries__timestamps
required: false
control:
name: control
@ -129,7 +99,7 @@ classes:
purpose of querying and slicing data by these values. If present, the length
of this array should be the same size as the first dimension of data.
multivalued: false
range: TimeSeries_control
range: TimeSeries__control
required: false
control_description:
name: control_description
@ -137,7 +107,7 @@ classes:
is present. If present, control_description[0] should describe time points
where control == 0.
multivalued: false
range: TimeSeries_control_description
range: TimeSeries__control_description
required: false
sync:
name: sync
@ -148,162 +118,8 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
multivalued: false
range: TimeSeries_sync
range: TimeSeries__sync
required: false
TimeSeries_data:
name: TimeSeries_data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension
should always represent time. This can also be used to store binary data (e.g.,
image frames). This can also be a link to data stored in an external file.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as signed 16-bit
integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
range: float32
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
its coercion to the specified 'unit'. Two common examples of this include
(a) data stored in an unsigned type that requires a shift after scaling
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
"instantaneous", or "step". For example, a voltage trace would be "continuous",
because samples are recorded from a continuous process. An array of lick
times would be "instantaneous", because the data represents distinct moments
in time. Times of image presentations would be "step" because the picture
remains the same until the next timepoint. This field is optional, but is
useful in providing information about the underlying data. It may inform
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
array:
name: array
range: TimeSeries_data_Array
TimeSeries_data_Array:
name: TimeSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: AnyType
required: true
num_DIM2:
name: num_DIM2
range: AnyType
required: false
num_DIM3:
name: num_DIM3
range: AnyType
required: false
num_DIM4:
name: num_DIM4
range: AnyType
required: false
TimeSeries_starting_time:
name: TimeSeries_starting_time
description: Timestamp of the first sample in seconds. When timestamps are uniformly
spaced, the timestamp of the first sample can be specified and all subsequent
ones calculated from the sampling rate attribute.
attributes:
rate:
name: rate
description: Sampling rate, in Hz.
range: float32
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
range: text
TimeSeries_timestamps:
name: TimeSeries_timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
array:
name: array
range: TimeSeries_timestamps_Array
TimeSeries_timestamps_Array:
name: TimeSeries_timestamps_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: float64
required: true
TimeSeries_control:
name: TimeSeries_control
description: Numerical labels that apply to each time point in data for the purpose
of querying and slicing data by these values. If present, the length of this
array should be the same size as the first dimension of data.
attributes:
array:
name: array
range: TimeSeries_control_Array
TimeSeries_control_Array:
name: TimeSeries_control_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: uint8
required: true
TimeSeries_control_description:
name: TimeSeries_control_description
description: Description of each control value. Must be present if control is
present. If present, control_description[0] should describe time points where
control == 0.
attributes:
array:
name: array
range: TimeSeries_control_description_Array
TimeSeries_control_description_Array:
name: TimeSeries_control_description_Array
is_a: Arraylike
attributes:
num_control_values:
name: num_control_values
range: text
required: true
TimeSeries_sync:
name: TimeSeries_sync
description: Lab-specific time and sync information as provided directly from
hardware devices and that is necessary for aligning all acquired time information
to a common timebase. The timestamp array stores time in the common timebase.
This group will usually only be populated in TimeSeries that are stored external
to the NWB file, in files storing raw data. Once timestamp data is calculated,
the contents of 'sync' are mostly for archival purposes.
ProcessingModule:
name: ProcessingModule
description: A collection of processed data.
@ -349,11 +165,5 @@ classes:
and only once, so the dataset should have the same length as the number
of images.
multivalued: false
range: Images_order_of_images
range: Images__order_of_images
required: false
Images_order_of_images:
name: Images_order_of_images
description: Ordered dataset of references to Image objects stored in the parent
group. Each Image object in the Images group should be stored once and only
once, so the dataset should have the same length as the number of images.
is_a: ImageReferences

View file

@ -0,0 +1,51 @@
name: core.nwb.behavior.include
id: core.nwb.behavior.include
imports:
- core.nwb.base
- core.nwb.misc
- nwb.language
- core.nwb.behavior.include
- core.nwb.behavior
default_prefix: core.nwb.behavior.include/
classes:
SpatialSeries__data:
name: SpatialSeries__data
description: 1-D or 2-D array storing position or direction relative to some reference
frame.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
array:
name: array
range: SpatialSeries__data__Array
SpatialSeries__data__Array:
name: SpatialSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
x:
name: x
range: numeric
required: false
minimum_cardinality: 1
maximum_cardinality: 1
x,y:
name: x,y
range: numeric
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x,y,z:
name: x,y,z
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3

View file

@ -4,6 +4,8 @@ imports:
- core.nwb.base
- core.nwb.misc
- nwb.language
- core.nwb.behavior.include
- core.nwb.behavior
default_prefix: core.nwb.behavior/
classes:
SpatialSeries:
@ -24,58 +26,14 @@ classes:
description: 1-D or 2-D array storing position or direction relative to some
reference frame.
multivalued: false
range: SpatialSeries_data
range: SpatialSeries__data
required: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
multivalued: false
range: SpatialSeries_reference_frame
required: false
SpatialSeries_data:
name: SpatialSeries_data
description: 1-D or 2-D array storing position or direction relative to some reference
frame.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
array:
name: array
range: SpatialSeries_data_Array
SpatialSeries_data_Array:
name: SpatialSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
x:
name: x
range: numeric
required: false
minimum_cardinality: 1
maximum_cardinality: 1
x,y:
name: x,y
range: numeric
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x,y,z:
name: x,y,z
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
SpatialSeries_reference_frame:
name: SpatialSeries_reference_frame
description: Description defining what exactly 'straight-ahead' means.
BehavioralEpochs:
name: BehavioralEpochs
description: TimeSeries for storing behavioral epochs. The objective of this

View file

@ -0,0 +1,8 @@
name: core.nwb.device.include
id: core.nwb.device.include
imports:
- core.nwb.base
- nwb.language
- core.nwb.device.include
- core.nwb.device
default_prefix: core.nwb.device.include/

View file

@ -3,6 +3,8 @@ id: core.nwb.device
imports:
- core.nwb.base
- nwb.language
- core.nwb.device.include
- core.nwb.device
default_prefix: core.nwb.device/
classes:
Device:

View file

@ -0,0 +1,283 @@
name: core.nwb.ecephys.include
id: core.nwb.ecephys.include
imports:
- core.nwb.base
- hdmf-common.table
- core.nwb.device
- nwb.language
- core.nwb.ecephys.include
- core.nwb.ecephys
default_prefix: core.nwb.ecephys.include/
classes:
ElectricalSeries__data:
name: ElectricalSeries__data
description: Recorded voltage data.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. This value
is fixed to 'volts'. Actual stored values are not necessarily stored in
these units. To access the data in these units, multiply 'data' by 'conversion',
followed by 'channel_conversion' (if present), and then add 'offset'.
range: text
array:
name: array
range: ElectricalSeries__data__Array
ElectricalSeries__data__Array:
name: ElectricalSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
num_samples:
name: num_samples
range: numeric
required: false
ElectricalSeries__electrodes:
name: ElectricalSeries__electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
ElectricalSeries__channel_conversion:
name: ElectricalSeries__channel_conversion
description: Channel-specific conversion factor. Multiply the data in the 'data'
dataset by these values along the channel axis (as indicated by axis attribute)
AND by the global conversion factor in the 'conversion' attribute of 'data'
to get the data values in Volts, i.e, data in Volts = data * data.conversion
* channel_conversion. This approach allows for both global and per-channel data
conversion factors needed to support the storage of electrical recordings as
native values generated by data acquisition systems. If this dataset is not
present, then there is no channel-specific conversion factor, i.e. it is 1 for
all channels.
attributes:
axis:
name: axis
description: The zero-indexed axis of the 'data' dataset that the channel-specific
conversion factor corresponds to. This value is fixed to 1.
range: int32
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
'data' dataset by these values along the channel axis (as indicated by axis
attribute) AND by the global conversion factor in the 'conversion' attribute
of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion
* channel_conversion. This approach allows for both global and per-channel
data conversion factors needed to support the storage of electrical recordings
as native values generated by data acquisition systems. If this dataset
is not present, then there is no channel-specific conversion factor, i.e.
it is 1 for all channels.
multivalued: true
range: float32
required: false
SpikeEventSeries__data:
name: SpikeEventSeries__data
description: Spike waveforms.
attributes:
unit:
name: unit
description: Unit of measurement for waveforms, which is fixed to 'volts'.
range: text
array:
name: array
range: SpikeEventSeries__data__Array
SpikeEventSeries__data__Array:
name: SpikeEventSeries__data__Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: numeric
required: true
num_samples:
name: num_samples
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
SpikeEventSeries__timestamps:
name: SpikeEventSeries__timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
Timestamps are required for the events. Unlike for TimeSeries, timestamps are
required for SpikeEventSeries and are thus re-specified here.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
the common experiment master-clock stored in NWBFile.timestamps_reference_time.
Timestamps are required for the events. Unlike for TimeSeries, timestamps
are required for SpikeEventSeries and are thus re-specified here.
multivalued: true
range: float64
required: true
FeatureExtraction__description:
name: FeatureExtraction__description
description: Description of features (eg, ''PC1'') for each of the extracted features.
attributes:
description:
name: description
description: Description of features (eg, ''PC1'') for each of the extracted
features.
multivalued: true
range: text
required: true
FeatureExtraction__features:
name: FeatureExtraction__features
description: Multi-dimensional array of features extracted from each event.
attributes:
array:
name: array
range: FeatureExtraction__features__Array
FeatureExtraction__features__Array:
name: FeatureExtraction__features__Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float32
required: false
num_channels:
name: num_channels
range: float32
required: false
num_features:
name: num_features
range: float32
required: false
FeatureExtraction__times:
name: FeatureExtraction__times
description: Times of events that features correspond to (can be a link).
attributes:
times:
name: times
description: Times of events that features correspond to (can be a link).
multivalued: true
range: float64
required: true
FeatureExtraction__electrodes:
name: FeatureExtraction__electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
EventDetection__source_idx:
name: EventDetection__source_idx
description: Indices (zero-based) into source ElectricalSeries::data array corresponding
to time of event. ''description'' should define what is meant by time of event
(e.g., .25 ms before action potential peak, zero-crossing time, etc). The index
points to each event from the raw data.
attributes:
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
corresponding to time of event. ''description'' should define what is meant
by time of event (e.g., .25 ms before action potential peak, zero-crossing
time, etc). The index points to each event from the raw data.
multivalued: true
range: int32
required: true
EventDetection__times:
name: EventDetection__times
description: Timestamps of events, in seconds.
attributes:
unit:
name: unit
description: Unit of measurement for event times, which is fixed to 'seconds'.
range: text
times:
name: times
description: Timestamps of events, in seconds.
multivalued: true
range: float64
required: true
ClusterWaveforms__waveform_mean:
name: ClusterWaveforms__waveform_mean
description: The mean waveform for each cluster, using the same indices for each
wave as cluster numbers in the associated Clustering module (i.e, cluster 3
is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should
be empty (e.g., zero- filled)
attributes:
array:
name: array
range: ClusterWaveforms__waveform_mean__Array
ClusterWaveforms__waveform_mean__Array:
name: ClusterWaveforms__waveform_mean__Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
ClusterWaveforms__waveform_sd:
name: ClusterWaveforms__waveform_sd
description: Stdev of waveforms for each cluster, using the same indices as in
mean
attributes:
array:
name: array
range: ClusterWaveforms__waveform_sd__Array
ClusterWaveforms__waveform_sd__Array:
name: ClusterWaveforms__waveform_sd__Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
Clustering__num:
name: Clustering__num
description: Cluster number of each event
attributes:
num:
name: num
description: Cluster number of each event
multivalued: true
range: int32
required: true
Clustering__peak_over_rms:
name: Clustering__peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
attributes:
peak_over_rms:
name: peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
multivalued: true
range: float32
required: true
Clustering__times:
name: Clustering__times
description: Times of clustered events, in seconds. This may be a link to times
field in associated FeatureExtraction module.
attributes:
times:
name: times
description: Times of clustered events, in seconds. This may be a link to
times field in associated FeatureExtraction module.
multivalued: true
range: float64
required: true

View file

@ -3,7 +3,10 @@ id: core.nwb.ecephys
imports:
- core.nwb.base
- hdmf-common.table
- core.nwb.device
- nwb.language
- core.nwb.ecephys.include
- core.nwb.ecephys
default_prefix: core.nwb.ecephys/
classes:
ElectricalSeries:
@ -28,14 +31,14 @@ classes:
name: data
description: Recorded voltage data.
multivalued: false
range: ElectricalSeries_data
range: ElectricalSeries__data
required: true
electrodes:
name: electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
multivalued: false
range: ElectricalSeries_electrodes
range: ElectricalSeries__electrodes
required: true
channel_conversion:
name: channel_conversion
@ -49,71 +52,8 @@ classes:
is not present, then there is no channel-specific conversion factor, i.e.
it is 1 for all channels.
multivalued: false
range: ElectricalSeries_channel_conversion
range: ElectricalSeries__channel_conversion
required: false
ElectricalSeries_data:
name: ElectricalSeries_data
description: Recorded voltage data.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. This value
is fixed to 'volts'. Actual stored values are not necessarily stored in
these units. To access the data in these units, multiply 'data' by 'conversion',
followed by 'channel_conversion' (if present), and then add 'offset'.
range: text
array:
name: array
range: ElectricalSeries_data_Array
ElectricalSeries_data_Array:
name: ElectricalSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
num_samples:
name: num_samples
range: numeric
required: false
ElectricalSeries_electrodes:
name: ElectricalSeries_electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
ElectricalSeries_channel_conversion:
name: ElectricalSeries_channel_conversion
description: Channel-specific conversion factor. Multiply the data in the 'data'
dataset by these values along the channel axis (as indicated by axis attribute)
AND by the global conversion factor in the 'conversion' attribute of 'data'
to get the data values in Volts, i.e, data in Volts = data * data.conversion
* channel_conversion. This approach allows for both global and per-channel data
conversion factors needed to support the storage of electrical recordings as
native values generated by data acquisition systems. If this dataset is not
present, then there is no channel-specific conversion factor, i.e. it is 1 for
all channels.
attributes:
axis:
name: axis
description: The zero-indexed axis of the 'data' dataset that the channel-specific
conversion factor corresponds to. This value is fixed to 1.
range: int32
array:
name: array
range: ElectricalSeries_channel_conversion_Array
ElectricalSeries_channel_conversion_Array:
name: ElectricalSeries_channel_conversion_Array
is_a: Arraylike
attributes:
num_channels:
name: num_channels
range: float32
required: true
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@ -130,7 +70,7 @@ classes:
name: data
description: Spike waveforms.
multivalued: false
range: SpikeEventSeries_data
range: SpikeEventSeries__data
required: true
timestamps:
name: timestamps
@ -139,60 +79,7 @@ classes:
Timestamps are required for the events. Unlike for TimeSeries, timestamps
are required for SpikeEventSeries and are thus re-specified here.
multivalued: false
range: SpikeEventSeries_timestamps
required: true
SpikeEventSeries_data:
name: SpikeEventSeries_data
description: Spike waveforms.
attributes:
unit:
name: unit
description: Unit of measurement for waveforms, which is fixed to 'volts'.
range: text
array:
name: array
range: SpikeEventSeries_data_Array
SpikeEventSeries_data_Array:
name: SpikeEventSeries_data_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: numeric
required: true
num_samples:
name: num_samples
range: numeric
required: true
num_channels:
name: num_channels
range: numeric
required: false
SpikeEventSeries_timestamps:
name: SpikeEventSeries_timestamps
description: Timestamps for samples stored in data, in seconds, relative to the
common experiment master-clock stored in NWBFile.timestamps_reference_time.
Timestamps are required for the events. Unlike for TimeSeries, timestamps are
required for SpikeEventSeries and are thus re-specified here.
attributes:
interval:
name: interval
description: Value is '1'
range: int32
unit:
name: unit
description: Unit of measurement for timestamps, which is fixed to 'seconds'.
range: text
array:
name: array
range: SpikeEventSeries_timestamps_Array
SpikeEventSeries_timestamps_Array:
name: SpikeEventSeries_timestamps_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: float64
range: SpikeEventSeries__timestamps
required: true
FeatureExtraction:
name: FeatureExtraction
@ -205,85 +92,27 @@ classes:
description: Description of features (eg, ''PC1'') for each of the extracted
features.
multivalued: false
range: FeatureExtraction_description
range: FeatureExtraction__description
required: true
features:
name: features
description: Multi-dimensional array of features extracted from each event.
multivalued: false
range: FeatureExtraction_features
range: FeatureExtraction__features
required: true
times:
name: times
description: Times of events that features correspond to (can be a link).
multivalued: false
range: FeatureExtraction_times
range: FeatureExtraction__times
required: true
electrodes:
name: electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
multivalued: false
range: FeatureExtraction_electrodes
range: FeatureExtraction__electrodes
required: true
FeatureExtraction_description:
name: FeatureExtraction_description
description: Description of features (eg, ''PC1'') for each of the extracted features.
attributes:
array:
name: array
range: FeatureExtraction_description_Array
FeatureExtraction_description_Array:
name: FeatureExtraction_description_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
required: true
FeatureExtraction_features:
name: FeatureExtraction_features
description: Multi-dimensional array of features extracted from each event.
attributes:
array:
name: array
range: FeatureExtraction_features_Array
FeatureExtraction_features_Array:
name: FeatureExtraction_features_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float32
required: false
num_channels:
name: num_channels
range: float32
required: false
num_features:
name: num_features
range: float32
required: false
FeatureExtraction_times:
name: FeatureExtraction_times
description: Times of events that features correspond to (can be a link).
attributes:
array:
name: array
range: FeatureExtraction_times_Array
FeatureExtraction_times_Array:
name: FeatureExtraction_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
required: true
FeatureExtraction_electrodes:
name: FeatureExtraction_electrodes
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
is_a: DynamicTableRegion
EventDetection:
name: EventDetection
description: Detected spike events from voltage trace(s).
@ -294,7 +123,7 @@ classes:
description: Description of how events were detected, such as voltage threshold,
or dV/dT threshold, as well as relevant values.
multivalued: false
range: EventDetection_detection_method
range: text
required: true
source_idx:
name: source_idx
@ -303,54 +132,13 @@ classes:
by time of event (e.g., .25 ms before action potential peak, zero-crossing
time, etc). The index points to each event from the raw data.
multivalued: false
range: EventDetection_source_idx
range: EventDetection__source_idx
required: true
times:
name: times
description: Timestamps of events, in seconds.
multivalued: false
range: EventDetection_times
required: true
EventDetection_detection_method:
name: EventDetection_detection_method
description: Description of how events were detected, such as voltage threshold,
or dV/dT threshold, as well as relevant values.
EventDetection_source_idx:
name: EventDetection_source_idx
description: Indices (zero-based) into source ElectricalSeries::data array corresponding
to time of event. ''description'' should define what is meant by time of event
(e.g., .25 ms before action potential peak, zero-crossing time, etc). The index
points to each event from the raw data.
attributes:
array:
name: array
range: EventDetection_source_idx_Array
EventDetection_source_idx_Array:
name: EventDetection_source_idx_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: int32
required: true
EventDetection_times:
name: EventDetection_times
description: Timestamps of events, in seconds.
attributes:
unit:
name: unit
description: Unit of measurement for event times, which is fixed to 'seconds'.
range: text
array:
name: array
range: EventDetection_times_Array
EventDetection_times_Array:
name: EventDetection_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
range: EventDetection__times
required: true
EventWaveform:
name: EventWaveform
@ -420,11 +208,8 @@ classes:
name: position
description: stereotaxic or common framework coordinates
multivalued: false
range: ElectrodeGroup_position
range: AnyType
required: false
ElectrodeGroup_position:
name: ElectrodeGroup_position
description: stereotaxic or common framework coordinates
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@ -439,7 +224,7 @@ classes:
name: waveform_filtering
description: Filtering applied to data before generating mean/sd
multivalued: false
range: ClusterWaveforms_waveform_filtering
range: text
required: true
waveform_mean:
name: waveform_mean
@ -448,60 +233,15 @@ classes:
3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence
should be empty (e.g., zero- filled)
multivalued: false
range: ClusterWaveforms_waveform_mean
range: ClusterWaveforms__waveform_mean
required: true
waveform_sd:
name: waveform_sd
description: Stdev of waveforms for each cluster, using the same indices as
in mean
multivalued: false
range: ClusterWaveforms_waveform_sd
range: ClusterWaveforms__waveform_sd
required: true
ClusterWaveforms_waveform_filtering:
name: ClusterWaveforms_waveform_filtering
description: Filtering applied to data before generating mean/sd
ClusterWaveforms_waveform_mean:
name: ClusterWaveforms_waveform_mean
description: The mean waveform for each cluster, using the same indices for each
wave as cluster numbers in the associated Clustering module (i.e, cluster 3
is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should
be empty (e.g., zero- filled)
attributes:
array:
name: array
range: ClusterWaveforms_waveform_mean_Array
ClusterWaveforms_waveform_mean_Array:
name: ClusterWaveforms_waveform_mean_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
ClusterWaveforms_waveform_sd:
name: ClusterWaveforms_waveform_sd
description: Stdev of waveforms for each cluster, using the same indices as in
mean
attributes:
array:
name: array
range: ClusterWaveforms_waveform_sd_Array
ClusterWaveforms_waveform_sd_Array:
name: ClusterWaveforms_waveform_sd_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: false
num_samples:
name: num_samples
range: float32
required: false
Clustering:
name: Clustering
description: DEPRECATED Clustered spike data, whether from automatic clustering
@ -513,76 +253,25 @@ classes:
description: Description of clusters or clustering, (e.g. cluster 0 is noise,
clusters curated using Klusters, etc)
multivalued: false
range: Clustering_description
range: text
required: true
num:
name: num
description: Cluster number of each event
multivalued: false
range: Clustering_num
range: Clustering__num
required: true
peak_over_rms:
name: peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
multivalued: false
range: Clustering_peak_over_rms
range: Clustering__peak_over_rms
required: true
times:
name: times
description: Times of clustered events, in seconds. This may be a link to
times field in associated FeatureExtraction module.
multivalued: false
range: Clustering_times
required: true
Clustering_description:
name: Clustering_description
description: Description of clusters or clustering, (e.g. cluster 0 is noise,
clusters curated using Klusters, etc)
Clustering_num:
name: Clustering_num
description: Cluster number of each event
attributes:
array:
name: array
range: Clustering_num_Array
Clustering_num_Array:
name: Clustering_num_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: int32
required: true
Clustering_peak_over_rms:
name: Clustering_peak_over_rms
description: Maximum ratio of waveform peak to RMS on any channel in the cluster
(provides a basic clustering metric).
attributes:
array:
name: array
range: Clustering_peak_over_rms_Array
Clustering_peak_over_rms_Array:
name: Clustering_peak_over_rms_Array
is_a: Arraylike
attributes:
num_clusters:
name: num_clusters
range: float32
required: true
Clustering_times:
name: Clustering_times
description: Times of clustered events, in seconds. This may be a link to times
field in associated FeatureExtraction module.
attributes:
array:
name: array
range: Clustering_times_Array
Clustering_times_Array:
name: Clustering_times_Array
is_a: Arraylike
attributes:
num_events:
name: num_events
range: float64
range: Clustering__times
required: true

View file

@ -0,0 +1,22 @@
name: core.nwb.epoch.include
id: core.nwb.epoch.include
imports:
- hdmf-common.table
- core.nwb.base
- nwb.language
- core.nwb.epoch.include
- core.nwb.epoch
default_prefix: core.nwb.epoch.include/
classes:
TimeIntervals__tags_index:
name: TimeIntervals__tags_index
description: Index for tags.
is_a: VectorIndex
TimeIntervals__timeseries:
name: TimeIntervals__timeseries
description: An index into a TimeSeries object.
is_a: TimeSeriesReferenceVectorData
TimeIntervals__timeseries_index:
name: TimeIntervals__timeseries_index
description: Index for timeseries.
is_a: VectorIndex

View file

@ -4,6 +4,8 @@ imports:
- hdmf-common.table
- core.nwb.base
- nwb.language
- core.nwb.epoch.include
- core.nwb.epoch
default_prefix: core.nwb.epoch/
classes:
TimeIntervals:
@ -15,60 +17,33 @@ classes:
start_time:
name: start_time
description: Start time of epoch, in seconds.
multivalued: false
range: TimeIntervals_start_time
required: true
multivalued: true
range: float32
stop_time:
name: stop_time
description: Stop time of epoch, in seconds.
multivalued: false
range: TimeIntervals_stop_time
required: true
multivalued: true
range: float32
tags:
name: tags
description: User-defined tags that identify or categorize events.
multivalued: false
range: TimeIntervals_tags
required: false
multivalued: true
range: text
tags_index:
name: tags_index
description: Index for tags.
multivalued: false
range: TimeIntervals_tags_index
range: TimeIntervals__tags_index
required: false
timeseries:
name: timeseries
description: An index into a TimeSeries object.
multivalued: false
range: TimeIntervals_timeseries
range: TimeIntervals__timeseries
required: false
timeseries_index:
name: timeseries_index
description: Index for timeseries.
multivalued: false
range: TimeIntervals_timeseries_index
range: TimeIntervals__timeseries_index
required: false
TimeIntervals_start_time:
name: TimeIntervals_start_time
description: Start time of epoch, in seconds.
is_a: VectorData
TimeIntervals_stop_time:
name: TimeIntervals_stop_time
description: Stop time of epoch, in seconds.
is_a: VectorData
TimeIntervals_tags:
name: TimeIntervals_tags
description: User-defined tags that identify or categorize events.
is_a: VectorData
TimeIntervals_tags_index:
name: TimeIntervals_tags_index
description: Index for tags.
is_a: VectorIndex
TimeIntervals_timeseries:
name: TimeIntervals_timeseries
description: An index into a TimeSeries object.
is_a: TimeSeriesReferenceVectorData
TimeIntervals_timeseries_index:
name: TimeIntervals_timeseries_index
description: Index for timeseries.
is_a: VectorIndex

View file

@ -0,0 +1,695 @@
name: core.nwb.file.include
id: core.nwb.file.include
imports:
- core.nwb.base
- hdmf-common.table
- core.nwb.device
- core.nwb.ecephys
- core.nwb.icephys
- core.nwb.ogen
- core.nwb.ophys
- core.nwb.epoch
- core.nwb.misc
- nwb.language
- core.nwb.file.include
- core.nwb.file
default_prefix: core.nwb.file.include/
classes:
NWBFile__file_create_date:
name: NWBFile__file_create_date
description: 'A record of the date the file was created and of subsequent modifications.
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted
strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with
no timezone offset. Date accuracy is up to milliseconds. The file can be created
after the experiment was run, so this may differ from the experiment start time.
Each modification to the nwb file adds a new entry to the array.'
attributes:
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
modifications. The date is stored in UTC with local timezone offset as ISO
8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored
in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds.
The file can be created after the experiment was run, so this may differ
from the experiment start time. Each modification to the nwb file adds a
new entry to the array.'
multivalued: true
range: isodatetime
required: true
NWBFile__acquisition:
name: NWBFile__acquisition
description: Data streams recorded from the system, including ephys, ophys, tracking,
etc. This group should be read-only after the experiment is completed and timestamps
are corrected to a common timebase. The data stored here may be links to raw
data stored in external NWB files. This will allow keeping bulky raw data out
of the file while preserving the option of keeping some/all in the file. Acquired
data includes tracking and experimental data streams (i.e., everything measured
from the system). If bulky data is stored in the /acquisition group, the data
can exist in a separate NWB file that is linked to by the file being used for
processing and analysis.
attributes:
NWBDataInterface:
name: NWBDataInterface
description: Acquired, raw data.
multivalued: true
range: NWBDataInterface
required: false
DynamicTable:
name: DynamicTable
description: Tabular data that is relevant to acquisition
multivalued: true
range: DynamicTable
required: false
NWBFile__analysis:
name: NWBFile__analysis
description: Lab-specific and custom scientific analysis of data. There is no
defined format for the content of this group - the format is up to the individual
user/lab. To facilitate sharing analysis data between labs, the contents here
should be stored in standard types (e.g., neurodata_types) and appropriately
documented. The file can store lab-specific and custom data analysis without
restriction on its form or schema, reducing data formatting restrictions on
end users. Such data should be placed in the analysis group. The analysis data
should be documented so that it could be shared with other labs.
attributes:
NWBContainer:
name: NWBContainer
description: Custom analysis results.
multivalued: true
range: NWBContainer
required: false
DynamicTable:
name: DynamicTable
description: Tabular data that is relevant to data stored in analysis
multivalued: true
range: DynamicTable
required: false
NWBFile__scratch:
name: NWBFile__scratch
description: A place to store one-off analysis results. Data placed here is not
intended for sharing. By placing data here, users acknowledge that there is
no guarantee that their data meets any standard.
attributes:
ScratchData:
name: ScratchData
description: Any one-off datasets
multivalued: true
range: ScratchData
required: false
NWBContainer:
name: NWBContainer
description: Any one-off containers
multivalued: true
range: NWBContainer
required: false
DynamicTable:
name: DynamicTable
description: Any one-off tables
multivalued: true
range: DynamicTable
required: false
NWBFile__processing:
name: NWBFile__processing
description: The home for ProcessingModules. These modules perform intermediate
analysis of data that is necessary to perform before scientific analysis. Examples
include spike clustering, extracting position from tracking data, stitching
together image slices. ProcessingModules can be large and express many data
sets from relatively complex analysis (e.g., spike detection and clustering)
or small, representing extraction of position information from tracking video,
or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik,
MClust) are expected to read/write data here. 'Processing' refers to intermediate
analysis of the acquired data to make it more amenable to scientific analysis.
attributes:
ProcessingModule:
name: ProcessingModule
description: Intermediate analysis of acquired data.
multivalued: true
range: ProcessingModule
required: false
NWBFile__stimulus:
name: NWBFile__stimulus
description: Data pushed into the system (eg, video stimulus, sound, voltage,
etc) and secondary representations of that data (eg, measurements of something
used as a stimulus). This group should be made read-only after experiment complete
and timestamps are corrected to common timebase. Stores both presented stimuli
and stimulus templates, the latter in case the same stimulus is presented multiple
times, or is pulled from an external stimulus library. Stimuli are here defined
as any signal that is pushed into the system as part of the experiment (eg,
sound, video, voltage, etc). Many different experiments can use the same stimuli,
and stimuli can be re-used during an experiment. The stimulus group is organized
so that one version of template stimuli can be stored and these be used multiple
times. These templates can exist in the present file or can be linked to a remote
library file.
attributes:
presentation:
name: presentation
description: Stimuli presented during the experiment.
multivalued: false
range: NWBFile__stimulus__presentation
required: true
templates:
name: templates
description: Template stimuli. Timestamps in templates are based on stimulus
design and are relative to the beginning of the stimulus. When templates
are used, the stimulus instances must convert presentation times to the
experiment`s time reference frame.
multivalued: false
range: NWBFile__stimulus__templates
required: true
NWBFile__stimulus__presentation:
name: NWBFile__stimulus__presentation
description: Stimuli presented during the experiment.
attributes:
TimeSeries:
name: TimeSeries
description: TimeSeries objects containing data of presented stimuli.
multivalued: true
range: TimeSeries
required: false
NWBFile__stimulus__templates:
name: NWBFile__stimulus__templates
description: Template stimuli. Timestamps in templates are based on stimulus design
and are relative to the beginning of the stimulus. When templates are used,
the stimulus instances must convert presentation times to the experiment`s time
reference frame.
attributes:
TimeSeries:
name: TimeSeries
description: TimeSeries objects containing template data of presented stimuli.
multivalued: true
range: TimeSeries
required: false
Images:
name: Images
description: Images objects containing images of presented stimuli.
multivalued: true
range: Images
required: false
NWBFile__general:
name: NWBFile__general
description: Experimental metadata, including protocol, notes and description
of hardware device(s). The metadata stored in this section should be used to
describe the experiment. Metadata necessary for interpreting the data is stored
with the data. General experimental metadata, including animal strain, experimental
protocols, experimenter, devices, etc, are stored under 'general'. Core metadata
(e.g., that required to interpret data fields) is stored with the data itself,
and implicitly defined by the file specification (e.g., time is in seconds).
The strategy used here for storing non-core metadata is to use free-form text
fields, such as would appear in sentences or paragraphs from a Methods section.
Metadata fields are text to enable them to be more general, for example to represent
ranges instead of numerical values. Machine-readable metadata is stored as attributes
to these free-form datasets. All entries in the below table are to be included
when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology
experiment) should not be created unless there is data to store within them.
attributes:
data_collection:
name: data_collection
description: Notes about data collection and analysis.
multivalued: false
range: text
required: false
experiment_description:
name: experiment_description
description: General description of the experiment.
multivalued: false
range: text
required: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
roles of different people involved.
multivalued: false
range: NWBFile__general__experimenter
required: false
institution:
name: institution
description: Institution(s) where experiment was performed.
multivalued: false
range: text
required: false
keywords:
name: keywords
description: Terms to search over.
multivalued: false
range: NWBFile__general__keywords
required: false
lab:
name: lab
description: Laboratory where experiment was performed.
multivalued: false
range: text
required: false
notes:
name: notes
description: Notes about the experiment.
multivalued: false
range: text
required: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
multivalued: false
range: text
required: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
multivalued: false
range: text
required: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
multivalued: false
range: NWBFile__general__related_publications
required: false
session_id:
name: session_id
description: Lab-specific ID for the session.
multivalued: false
range: text
required: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
multivalued: false
range: text
required: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
multivalued: false
range: NWBFile__general__source_script
required: false
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
multivalued: false
range: text
required: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
multivalued: false
range: text
required: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
multivalued: false
range: text
required: false
LabMetaData:
name: LabMetaData
description: Place-holder than can be extended so that lab-specific meta-data
can be placed in /general.
multivalued: true
range: LabMetaData
required: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
monitors, ADC boards, microscopes, etc.
multivalued: false
range: NWBFile__general__devices
required: false
subject:
name: subject
description: Information about the animal or person from which the data was
measured.
multivalued: false
range: NWBFile__general__subject
required: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
multivalued: false
range: NWBFile__general__extracellular_ephys
required: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
multivalued: false
range: NWBFile__general__intracellular_ephys
required: false
optogenetics:
name: optogenetics
description: Metadata describing optogenetic stimuluation.
multivalued: false
range: NWBFile__general__optogenetics
required: false
optophysiology:
name: optophysiology
description: Metadata related to optophysiology.
multivalued: false
range: NWBFile__general__optophysiology
required: false
NWBFile__general__experimenter:
name: NWBFile__general__experimenter
description: Name of person(s) who performed the experiment. Can also specify
roles of different people involved.
attributes:
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
roles of different people involved.
multivalued: true
range: text
required: false
NWBFile__general__keywords:
name: NWBFile__general__keywords
description: Terms to search over.
attributes:
keywords:
name: keywords
description: Terms to search over.
multivalued: true
range: text
required: false
NWBFile__general__related_publications:
name: NWBFile__general__related_publications
description: Publication information. PMID, DOI, URL, etc.
attributes:
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
multivalued: true
range: text
required: false
NWBFile__general__source_script:
name: NWBFile__general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
file_name:
name: file_name
description: Name of script file.
range: text
NWBFile__general__devices:
name: NWBFile__general__devices
description: Description of hardware devices used during experiment, e.g., monitors,
ADC boards, microscopes, etc.
attributes:
Device:
name: Device
description: Data acquisition devices.
multivalued: true
range: Device
required: false
NWBFile__general__subject:
name: NWBFile__general__subject
description: Information about the animal or person from which the data was measured.
is_a: Subject
NWBFile__general__extracellular_ephys:
name: NWBFile__general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
ElectrodeGroup:
name: ElectrodeGroup
description: Physical group of electrodes.
multivalued: true
range: ElectrodeGroup
required: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
multivalued: false
range: NWBFile__general__extracellular_ephys__electrodes
required: false
NWBFile__general__extracellular_ephys__electrodes:
name: NWBFile__general__extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
x:
name: x
description: x coordinate of the channel location in the brain (+x is posterior).
multivalued: true
range: float32
y:
name: y
description: y coordinate of the channel location in the brain (+y is inferior).
multivalued: true
range: float32
z:
name: z
description: z coordinate of the channel location in the brain (+z is right).
multivalued: true
range: float32
imp:
name: imp
description: Impedance of the channel, in ohms.
multivalued: true
range: float32
location:
name: location
description: Location of the electrode (channel). Specify the area, layer,
comments on estimation of area/layer, stereotaxic coordinates if in vivo,
etc. Use standard atlas names for anatomical regions when possible.
multivalued: true
range: text
filtering:
name: filtering
description: Description of hardware filtering, including the filter name
and frequency cutoffs.
multivalued: true
range: text
group:
name: group
description: Reference to the ElectrodeGroup this electrode is a part of.
multivalued: true
range: ElectrodeGroup
group_name:
name: group_name
description: Name of the ElectrodeGroup this electrode is a part of.
multivalued: true
range: text
rel_x:
name: rel_x
description: x coordinate in electrode group
multivalued: true
range: float32
rel_y:
name: rel_y
description: y coordinate in electrode group
multivalued: true
range: float32
rel_z:
name: rel_z
description: z coordinate in electrode group
multivalued: true
range: float32
reference:
name: reference
description: Description of the reference electrode and/or reference scheme
used for this electrode, e.g., "stainless steel skull screw" or "online
common average referencing".
multivalued: true
range: text
NWBFile__general__intracellular_ephys:
name: NWBFile__general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
filtering:
name: filtering
description: '[DEPRECATED] Use IntracellularElectrode.filtering instead. Description
of filtering used. Includes filtering type and parameters, frequency fall-off,
etc. If this changes between TimeSeries, filter description should be stored
as a text attribute for each TimeSeries.'
multivalued: false
range: text
required: false
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode.
multivalued: true
range: IntracellularElectrode
required: false
sweep_table:
name: sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries.
SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
multivalued: false
range: NWBFile__general__intracellular_ephys__sweep_table
required: false
intracellular_recordings:
name: intracellular_recordings
description: A table to group together a stimulus and response from a single
electrode and a single simultaneous recording. Each row in the table represents
a single recording consisting typically of a stimulus and a corresponding
response. In some cases, however, only a stimulus or a response are recorded
as as part of an experiment. In this case both, the stimulus and response
will point to the same TimeSeries while the idx_start and count of the invalid
column will be set to -1, thus, indicating that no values have been recorded
for the stimulus or response, respectively. Note, a recording MUST contain
at least a stimulus or a response. Typically the stimulus and response are
PatchClampSeries. However, the use of AD/DA channels that are not associated
to an electrode is also common in intracellular electrophysiology, in which
case other TimeSeries may be used.
multivalued: false
range: NWBFile__general__intracellular_ephys__intracellular_recordings
required: false
simultaneous_recordings:
name: simultaneous_recordings
description: A table for grouping different intracellular recordings from
the IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
multivalued: false
range: NWBFile__general__intracellular_ephys__simultaneous_recordings
required: false
sequential_recordings:
name: sequential_recordings
description: A table for grouping different sequential recordings from the
SimultaneousRecordingsTable table together. This is typically used to group
together sequential recordings where the a sequence of stimuli of the same
type with varying parameters have been presented in a sequence.
multivalued: false
range: NWBFile__general__intracellular_ephys__sequential_recordings
required: false
repetitions:
name: repetitions
description: A table for grouping different sequential intracellular recordings
together. With each SequentialRecording typically representing a particular
type of stimulus, the RepetitionsTable table is typically used to group
sets of stimuli applied in sequence.
multivalued: false
range: NWBFile__general__intracellular_ephys__repetitions
required: false
experimental_conditions:
name: experimental_conditions
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental experimental_conditions.
multivalued: false
range: NWBFile__general__intracellular_ephys__experimental_conditions
required: false
NWBFile__general__intracellular_ephys__sweep_table:
name: NWBFile__general__intracellular_ephys__sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable
is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
is_a: SweepTable
NWBFile__general__intracellular_ephys__intracellular_recordings:
name: NWBFile__general__intracellular_ephys__intracellular_recordings
description: A table to group together a stimulus and response from a single electrode
and a single simultaneous recording. Each row in the table represents a single
recording consisting typically of a stimulus and a corresponding response. In
some cases, however, only a stimulus or a response are recorded as as part of
an experiment. In this case both, the stimulus and response will point to the
same TimeSeries while the idx_start and count of the invalid column will be
set to -1, thus, indicating that no values have been recorded for the stimulus
or response, respectively. Note, a recording MUST contain at least a stimulus
or a response. Typically the stimulus and response are PatchClampSeries. However,
the use of AD/DA channels that are not associated to an electrode is also common
in intracellular electrophysiology, in which case other TimeSeries may be used.
is_a: IntracellularRecordingsTable
NWBFile__general__intracellular_ephys__simultaneous_recordings:
name: NWBFile__general__intracellular_ephys__simultaneous_recordings
description: A table for grouping different intracellular recordings from the
IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
is_a: SimultaneousRecordingsTable
NWBFile__general__intracellular_ephys__sequential_recordings:
name: NWBFile__general__intracellular_ephys__sequential_recordings
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
table together. This is typically used to group together sequential recordings
where a sequence of stimuli of the same type with varying parameters has
been presented in a sequence.
is_a: SequentialRecordingsTable
NWBFile__general__intracellular_ephys__repetitions:
name: NWBFile__general__intracellular_ephys__repetitions
description: A table for grouping different sequential intracellular recordings
together. With each SequentialRecording typically representing a particular
type of stimulus, the RepetitionsTable table is typically used to group sets
of stimuli applied in sequence.
is_a: RepetitionsTable
NWBFile__general__intracellular_ephys__experimental_conditions:
name: NWBFile__general__intracellular_ephys__experimental_conditions
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental condition.
is_a: ExperimentalConditionsTable
NWBFile__general__optogenetics:
name: NWBFile__general__optogenetics
description: Metadata describing optogenetic stimulation.
attributes:
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: An optogenetic stimulation site.
multivalued: true
range: OptogeneticStimulusSite
required: false
NWBFile__general__optophysiology:
name: NWBFile__general__optophysiology
description: Metadata related to optophysiology.
attributes:
ImagingPlane:
name: ImagingPlane
description: An imaging plane.
multivalued: true
range: ImagingPlane
required: false
NWBFile__intervals:
name: NWBFile__intervals
description: Experimental intervals, whether that be logically distinct sub-experiments
having a particular scientific goal, trials (see trials subgroup) during an
experiment, or epochs (see epochs subgroup) deriving from analysis of data.
attributes:
epochs:
name: epochs
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
multivalued: false
range: NWBFile__intervals__epochs
required: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
multivalued: false
range: NWBFile__intervals__trials
required: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
multivalued: false
range: NWBFile__intervals__invalid_times
required: false
TimeIntervals:
name: TimeIntervals
description: Optional additional table(s) for describing other experimental
time intervals.
multivalued: true
range: TimeIntervals
required: false
NWBFile__intervals__epochs:
name: NWBFile__intervals__epochs
description: Divisions in time marking experimental stages or sub-divisions of
a single recording session.
is_a: TimeIntervals
NWBFile__intervals__trials:
name: NWBFile__intervals__trials
description: Repeated experimental events that have a logical grouping.
is_a: TimeIntervals
NWBFile__intervals__invalid_times:
name: NWBFile__intervals__invalid_times
description: Time intervals that should be removed from analysis.
is_a: TimeIntervals
NWBFile__units:
name: NWBFile__units
description: Data about sorted spike units.
is_a: Units
Subject__age:
name: Subject__age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
attributes:
reference:
name: reference
description: Age is with reference to this event. Can be 'birth' or 'gestational'.
If reference is omitted, 'birth' is implied.
range: text

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,221 @@
name: core.nwb.icephys.include
id: core.nwb.icephys.include
imports:
- core.nwb.base
- core.nwb.device
- hdmf-common.table
- nwb.language
- core.nwb.icephys.include
- core.nwb.icephys
default_prefix: core.nwb.icephys.include/
classes:
PatchClampSeries__data:
name: PatchClampSeries__data
description: Recorded voltage or current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
data:
name: data
description: Recorded voltage or current.
multivalued: true
range: numeric
required: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
description: Recorded voltage.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
CurrentClampStimulusSeries__data:
name: CurrentClampStimulusSeries__data
description: Stimulus current applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries__data:
name: VoltageClampSeries__data
description: Recorded current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
range: text
VoltageClampSeries__capacitance_slow:
name: VoltageClampSeries__capacitance_slow
description: Slow capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
range: text
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
range: text
VoltageClampSeries__resistance_comp_correction:
name: VoltageClampSeries__resistance_comp_correction
description: Resistance compensation correction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
range: text
VoltageClampSeries__resistance_comp_prediction:
name: VoltageClampSeries__resistance_comp_prediction
description: Resistance compensation prediction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
range: text
VoltageClampSeries__whole_cell_capacitance_comp:
name: VoltageClampSeries__whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
range: text
VoltageClampSeries__whole_cell_series_resistance_comp:
name: VoltageClampSeries__whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
range: text
VoltageClampStimulusSeries__data:
name: VoltageClampStimulusSeries__data
description: Stimulus voltage applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
SweepTable__series_index:
name: SweepTable__series_index
description: Index for series.
is_a: VectorIndex
IntracellularStimuliTable__stimulus:
name: IntracellularStimuliTable__stimulus
description: Column storing the reference to the recorded stimulus for the recording
(rows).
is_a: TimeSeriesReferenceVectorData
IntracellularResponsesTable__response:
name: IntracellularResponsesTable__response
description: Column storing the reference to the recorded response for the recording
(rows)
is_a: TimeSeriesReferenceVectorData
IntracellularRecordingsTable__electrodes:
name: IntracellularRecordingsTable__electrodes
description: Table for storing intracellular electrode related metadata.
is_a: IntracellularElectrodesTable
IntracellularRecordingsTable__stimuli:
name: IntracellularRecordingsTable__stimuli
description: Table for storing intracellular stimulus related metadata.
is_a: IntracellularStimuliTable
IntracellularRecordingsTable__responses:
name: IntracellularRecordingsTable__responses
description: Table for storing intracellular response related metadata.
is_a: IntracellularResponsesTable
SimultaneousRecordingsTable__recordings:
name: SimultaneousRecordingsTable__recordings
description: A reference to one or more rows in the IntracellularRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the IntracellularRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
SimultaneousRecordingsTable__recordings_index:
name: SimultaneousRecordingsTable__recordings_index
description: Index dataset for the recordings column.
is_a: VectorIndex
SequentialRecordingsTable__simultaneous_recordings:
name: SequentialRecordingsTable__simultaneous_recordings
description: A reference to one or more rows in the SimultaneousRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SimultaneousRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
SequentialRecordingsTable__simultaneous_recordings_index:
name: SequentialRecordingsTable__simultaneous_recordings_index
description: Index dataset for the simultaneous_recordings column.
is_a: VectorIndex
RepetitionsTable__sequential_recordings:
name: RepetitionsTable__sequential_recordings
description: A reference to one or more rows in the SequentialRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SequentialRecordingsTable table that this table
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
RepetitionsTable__sequential_recordings_index:
name: RepetitionsTable__sequential_recordings_index
description: Index dataset for the sequential_recordings column.
is_a: VectorIndex
ExperimentalConditionsTable__repetitions:
name: ExperimentalConditionsTable__repetitions
description: A reference to one or more rows in the RepetitionsTable table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the RepetitionsTable table that this table region
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
ExperimentalConditionsTable__repetitions_index:
name: ExperimentalConditionsTable__repetitions_index
description: Index dataset for the repetitions column.
is_a: VectorIndex

View file

@ -2,8 +2,11 @@ name: core.nwb.icephys
id: core.nwb.icephys
imports:
- core.nwb.base
- core.nwb.device
- hdmf-common.table
- nwb.language
- core.nwb.icephys.include
- core.nwb.icephys
default_prefix: core.nwb.icephys/
classes:
PatchClampSeries:
@ -24,39 +27,15 @@ classes:
name: data
description: Recorded voltage or current.
multivalued: false
range: PatchClampSeries_data
range: PatchClampSeries__data
required: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
multivalued: false
range: PatchClampSeries_gain
range: float32
required: false
PatchClampSeries_data:
name: PatchClampSeries_data
description: Recorded voltage or current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
array:
name: array
range: PatchClampSeries_data_Array
PatchClampSeries_data_Array:
name: PatchClampSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
PatchClampSeries_gain:
name: PatchClampSeries_gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).
CurrentClampSeries:
name: CurrentClampSeries
description: Voltage data from an intracellular current-clamp recording. A corresponding
@ -68,46 +47,26 @@ classes:
name: data
description: Recorded voltage.
multivalued: false
range: CurrentClampSeries_data
range: CurrentClampSeries__data
required: true
bias_current:
name: bias_current
description: Bias current, in amps.
multivalued: false
range: CurrentClampSeries_bias_current
range: float32
required: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms.
multivalued: false
range: CurrentClampSeries_bridge_balance
range: float32
required: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads.
multivalued: false
range: CurrentClampSeries_capacitance_compensation
range: float32
required: false
CurrentClampSeries_data:
name: CurrentClampSeries_data
description: Recorded voltage.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
CurrentClampSeries_bias_current:
name: CurrentClampSeries_bias_current
description: Bias current, in amps.
CurrentClampSeries_bridge_balance:
name: CurrentClampSeries_bridge_balance
description: Bridge balance, in ohms.
CurrentClampSeries_capacitance_compensation:
name: CurrentClampSeries_capacitance_compensation
description: Capacitance compensation, in farads.
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
@ -125,29 +84,20 @@ classes:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_bias_current
range: float32
required: true
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_bridge_balance
range: float32
required: true
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
multivalued: false
range: IZeroClampSeries_capacitance_compensation
range: float32
required: true
IZeroClampSeries_bias_current:
name: IZeroClampSeries_bias_current
description: Bias current, in amps, fixed to 0.0.
IZeroClampSeries_bridge_balance:
name: IZeroClampSeries_bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
IZeroClampSeries_capacitance_compensation:
name: IZeroClampSeries_capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
description: Stimulus current applied during current clamp recording.
@ -157,19 +107,8 @@ classes:
name: data
description: Stimulus current applied.
multivalued: false
range: CurrentClampStimulusSeries_data
range: CurrentClampStimulusSeries__data
required: true
CurrentClampStimulusSeries_data:
name: CurrentClampStimulusSeries_data
description: Stimulus current applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@ -181,122 +120,50 @@ classes:
name: data
description: Recorded current.
multivalued: false
range: VoltageClampSeries_data
range: VoltageClampSeries__data
required: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
multivalued: false
range: VoltageClampSeries_capacitance_fast
range: VoltageClampSeries__capacitance_fast
required: false
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
multivalued: false
range: VoltageClampSeries_capacitance_slow
range: VoltageClampSeries__capacitance_slow
required: false
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
multivalued: false
range: VoltageClampSeries_resistance_comp_bandwidth
range: VoltageClampSeries__resistance_comp_bandwidth
required: false
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
multivalued: false
range: VoltageClampSeries_resistance_comp_correction
range: VoltageClampSeries__resistance_comp_correction
required: false
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
multivalued: false
range: VoltageClampSeries_resistance_comp_prediction
range: VoltageClampSeries__resistance_comp_prediction
required: false
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
multivalued: false
range: VoltageClampSeries_whole_cell_capacitance_comp
range: VoltageClampSeries__whole_cell_capacitance_comp
required: false
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
multivalued: false
range: VoltageClampSeries_whole_cell_series_resistance_comp
range: VoltageClampSeries__whole_cell_series_resistance_comp
required: false
VoltageClampSeries_data:
name: VoltageClampSeries_data
description: Recorded current.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
VoltageClampSeries_capacitance_fast:
name: VoltageClampSeries_capacitance_fast
description: Fast capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
range: text
VoltageClampSeries_capacitance_slow:
name: VoltageClampSeries_capacitance_slow
description: Slow capacitance, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
range: text
VoltageClampSeries_resistance_comp_bandwidth:
name: VoltageClampSeries_resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
range: text
VoltageClampSeries_resistance_comp_correction:
name: VoltageClampSeries_resistance_comp_correction
description: Resistance compensation correction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
range: text
VoltageClampSeries_resistance_comp_prediction:
name: VoltageClampSeries_resistance_comp_prediction
description: Resistance compensation prediction, in percent.
attributes:
unit:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
range: text
VoltageClampSeries_whole_cell_capacitance_comp:
name: VoltageClampSeries_whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
range: text
VoltageClampSeries_whole_cell_series_resistance_comp:
name: VoltageClampSeries_whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
attributes:
unit:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
range: text
VoltageClampStimulusSeries:
name: VoltageClampStimulusSeries
description: Stimulus voltage applied during a voltage clamp recording.
@ -306,19 +173,8 @@ classes:
name: data
description: Stimulus voltage applied.
multivalued: false
range: VoltageClampStimulusSeries_data
range: VoltageClampStimulusSeries__data
required: true
VoltageClampStimulusSeries_data:
name: VoltageClampStimulusSeries_data
description: Stimulus voltage applied.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
range: text
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@ -328,25 +184,25 @@ classes:
name: cell_id
description: unique ID of the cell
multivalued: false
range: IntracellularElectrode_cell_id
range: text
required: false
description:
name: description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
multivalued: false
range: IntracellularElectrode_description
range: text
required: true
filtering:
name: filtering
description: Electrode specific filtering.
multivalued: false
range: IntracellularElectrode_filtering
range: text
required: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
multivalued: false
range: IntracellularElectrode_initial_access_resistance
range: text
required: false
location:
name: location
@ -354,52 +210,26 @@ classes:
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: IntracellularElectrode_location
range: text
required: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
multivalued: false
range: IntracellularElectrode_resistance
range: text
required: false
seal:
name: seal
description: Information about seal used for recording.
multivalued: false
range: IntracellularElectrode_seal
range: text
required: false
slice:
name: slice
description: Information about slice used for recording.
multivalued: false
range: IntracellularElectrode_slice
range: text
required: false
IntracellularElectrode_cell_id:
name: IntracellularElectrode_cell_id
description: unique ID of the cell
IntracellularElectrode_description:
name: IntracellularElectrode_description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
IntracellularElectrode_filtering:
name: IntracellularElectrode_filtering
description: Electrode specific filtering.
IntracellularElectrode_initial_access_resistance:
name: IntracellularElectrode_initial_access_resistance
description: Initial access resistance.
IntracellularElectrode_location:
name: IntracellularElectrode_location
description: Location of the electrode. Specify the area, layer, comments on estimation
of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names
for anatomical regions when possible.
IntracellularElectrode_resistance:
name: IntracellularElectrode_resistance
description: Electrode resistance, in ohms.
IntracellularElectrode_seal:
name: IntracellularElectrode_seal
description: Information about seal used for recording.
IntracellularElectrode_slice:
name: IntracellularElectrode_slice
description: Information about slice used for recording.
SweepTable:
name: SweepTable
description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable
@ -411,33 +241,19 @@ classes:
sweep_number:
name: sweep_number
description: Sweep number of the PatchClampSeries in that row.
multivalued: false
range: SweepTable_sweep_number
required: true
multivalued: true
range: uint32
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
multivalued: false
range: SweepTable_series
required: true
multivalued: true
range: PatchClampSeries
series_index:
name: series_index
description: Index for series.
multivalued: false
range: SweepTable_series_index
range: SweepTable__series_index
required: true
SweepTable_sweep_number:
name: SweepTable_sweep_number
description: Sweep number of the PatchClampSeries in that row.
is_a: VectorData
SweepTable_series:
name: SweepTable_series
description: The PatchClampSeries with the sweep number in that row.
is_a: VectorData
SweepTable_series_index:
name: SweepTable_series_index
description: Index for series.
is_a: VectorIndex
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
description: Table for storing intracellular electrode related metadata.
@ -450,13 +266,8 @@ classes:
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
multivalued: false
range: IntracellularElectrodesTable_electrode
required: true
IntracellularElectrodesTable_electrode:
name: IntracellularElectrodesTable_electrode
description: Column for storing the reference to the intracellular electrode.
is_a: VectorData
multivalued: true
range: IntracellularElectrode
IntracellularStimuliTable:
name: IntracellularStimuliTable
description: Table for storing intracellular stimulus related metadata.
@ -471,13 +282,8 @@ classes:
description: Column storing the reference to the recorded stimulus for the
recording (rows).
multivalued: false
range: IntracellularStimuliTable_stimulus
range: IntracellularStimuliTable__stimulus
required: true
IntracellularStimuliTable_stimulus:
name: IntracellularStimuliTable_stimulus
description: Column storing the reference to the recorded stimulus for the recording
(rows).
is_a: TimeSeriesReferenceVectorData
IntracellularResponsesTable:
name: IntracellularResponsesTable
description: Table for storing intracellular response related metadata.
@ -492,13 +298,8 @@ classes:
description: Column storing the reference to the recorded response for the
recording (rows)
multivalued: false
range: IntracellularResponsesTable_response
range: IntracellularResponsesTable__response
required: true
IntracellularResponsesTable_response:
name: IntracellularResponsesTable_response
description: Column storing the reference to the recorded response for the recording
(rows)
is_a: TimeSeriesReferenceVectorData
IntracellularRecordingsTable:
name: IntracellularRecordingsTable
description: A table to group together a stimulus and response from a single electrode
@ -523,32 +324,20 @@ classes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
multivalued: false
range: IntracellularRecordingsTable_electrodes
range: IntracellularRecordingsTable__electrodes
required: true
stimuli:
name: stimuli
description: Table for storing intracellular stimulus related metadata.
multivalued: false
range: IntracellularRecordingsTable_stimuli
range: IntracellularRecordingsTable__stimuli
required: true
responses:
name: responses
description: Table for storing intracellular response related metadata.
multivalued: false
range: IntracellularRecordingsTable_responses
range: IntracellularRecordingsTable__responses
required: true
IntracellularRecordingsTable_electrodes:
name: IntracellularRecordingsTable_electrodes
description: Table for storing intracellular electrode related metadata.
is_a: IntracellularElectrodesTable
IntracellularRecordingsTable_stimuli:
name: IntracellularRecordingsTable_stimuli
description: Table for storing intracellular stimulus related metadata.
is_a: IntracellularStimuliTable
IntracellularRecordingsTable_responses:
name: IntracellularRecordingsTable_responses
description: Table for storing intracellular response related metadata.
is_a: IntracellularResponsesTable
SimultaneousRecordingsTable:
name: SimultaneousRecordingsTable
description: A table for grouping different intracellular recordings from the
@ -561,30 +350,14 @@ classes:
description: A reference to one or more rows in the IntracellularRecordingsTable
table.
multivalued: false
range: SimultaneousRecordingsTable_recordings
range: SimultaneousRecordingsTable__recordings
required: true
recordings_index:
name: recordings_index
description: Index dataset for the recordings column.
multivalued: false
range: SimultaneousRecordingsTable_recordings_index
range: SimultaneousRecordingsTable__recordings_index
required: true
SimultaneousRecordingsTable_recordings:
name: SimultaneousRecordingsTable_recordings
description: A reference to one or more rows in the IntracellularRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the IntracellularRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
SimultaneousRecordingsTable_recordings_index:
name: SimultaneousRecordingsTable_recordings_index
description: Index dataset for the recordings column.
is_a: VectorIndex
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
@ -598,40 +371,19 @@ classes:
description: A reference to one or more rows in the SimultaneousRecordingsTable
table.
multivalued: false
range: SequentialRecordingsTable_simultaneous_recordings
range: SequentialRecordingsTable__simultaneous_recordings
required: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
description: Index dataset for the simultaneous_recordings column.
multivalued: false
range: SequentialRecordingsTable_simultaneous_recordings_index
range: SequentialRecordingsTable__simultaneous_recordings_index
required: true
stimulus_type:
name: stimulus_type
description: The type of stimulus used for the sequential recording.
multivalued: false
range: SequentialRecordingsTable_stimulus_type
required: true
SequentialRecordingsTable_simultaneous_recordings:
name: SequentialRecordingsTable_simultaneous_recordings
description: A reference to one or more rows in the SimultaneousRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SimultaneousRecordingsTable table that this
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
SequentialRecordingsTable_simultaneous_recordings_index:
name: SequentialRecordingsTable_simultaneous_recordings_index
description: Index dataset for the simultaneous_recordings column.
is_a: VectorIndex
SequentialRecordingsTable_stimulus_type:
name: SequentialRecordingsTable_stimulus_type
description: The type of stimulus used for the sequential recording.
is_a: VectorData
multivalued: true
range: text
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
@ -645,30 +397,14 @@ classes:
description: A reference to one or more rows in the SequentialRecordingsTable
table.
multivalued: false
range: RepetitionsTable_sequential_recordings
range: RepetitionsTable__sequential_recordings
required: true
sequential_recordings_index:
name: sequential_recordings_index
description: Index dataset for the sequential_recordings column.
multivalued: false
range: RepetitionsTable_sequential_recordings_index
range: RepetitionsTable__sequential_recordings_index
required: true
RepetitionsTable_sequential_recordings:
name: RepetitionsTable_sequential_recordings
description: A reference to one or more rows in the SequentialRecordingsTable
table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the SequentialRecordingsTable table that this table
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
RepetitionsTable_sequential_recordings_index:
name: RepetitionsTable_sequential_recordings_index
description: Index dataset for the sequential_recordings column.
is_a: VectorIndex
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
@ -679,26 +415,11 @@ classes:
name: repetitions
description: A reference to one or more rows in the RepetitionsTable table.
multivalued: false
range: ExperimentalConditionsTable_repetitions
range: ExperimentalConditionsTable__repetitions
required: true
repetitions_index:
name: repetitions_index
description: Index dataset for the repetitions column.
multivalued: false
range: ExperimentalConditionsTable_repetitions_index
range: ExperimentalConditionsTable__repetitions_index
required: true
ExperimentalConditionsTable_repetitions:
name: ExperimentalConditionsTable_repetitions
description: A reference to one or more rows in the RepetitionsTable table.
is_a: DynamicTableRegion
attributes:
table:
name: table
description: Reference to the RepetitionsTable table that this table region
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
ExperimentalConditionsTable_repetitions_index:
name: ExperimentalConditionsTable_repetitions_index
description: Index dataset for the repetitions column.
is_a: VectorIndex

View file

@ -0,0 +1,207 @@
name: core.nwb.image.include
id: core.nwb.image.include
imports:
- core.nwb.base
- core.nwb.device
- nwb.language
- core.nwb.image.include
- core.nwb.image
default_prefix: core.nwb.image.include/
classes:
GrayscaleImage__Array:
name: GrayscaleImage__Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
RGBImage__Array:
name: RGBImage__Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
RGBAImage__Array:
name: RGBAImage__Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
ImageSeries__data:
name: ImageSeries__data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
attributes:
array:
name: array
range: ImageSeries__data__Array
ImageSeries__data__Array:
name: ImageSeries__data__Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
z:
name: z
range: numeric
required: false
ImageSeries__dimension:
name: ImageSeries__dimension
description: Number of pixels on x, y, (and z) axes.
attributes:
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
multivalued: true
range: int32
required: false
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
if format='external'. This is only relevant if the image series is stored in
the file system as one or more image file(s). This field should NOT be used
if the image is stored in another NWB file and that file is linked to this file.
attributes:
starting_frame:
name: starting_frame
description: Each external image may contain one or more consecutive frames
of the full ImageSeries. This attribute serves as an index to indicate which
frames each file contains, to facilitate random access. The 'starting_frame'
attribute, hence, contains a list of frame numbers within the full ImageSeries
of the first frame of each file listed in the parent 'external_file' dataset.
Zero-based indexing is used (hence, the first element will always be zero).
For example, if the 'external_file' dataset has three paths to files and
the first file has 5 frames, the second file has 10 frames, and the third
file has 20 frames, then this attribute will have values [0, 5, 15]. If
there is a single external file that holds all of the frames of the ImageSeries
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
external_file:
name: external_file
description: Paths to one or more external file(s). The field is only present
if format='external'. This is only relevant if the image series is stored
in the file system as one or more image file(s). This field should NOT be
used if the image is stored in another NWB file and that file is linked
to this file.
multivalued: true
range: text
required: false
OpticalSeries__field_of_view:
name: OpticalSeries__field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: OpticalSeries__field_of_view__Array
OpticalSeries__field_of_view__Array:
name: OpticalSeries__field_of_view__Array
is_a: Arraylike
attributes:
width, height:
name: width, height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width, height, depth:
name: width, height, depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
OpticalSeries__data:
name: OpticalSeries__data
description: Images presented to subject, either grayscale or RGB
attributes:
array:
name: array
range: OpticalSeries__data__Array
OpticalSeries__data__Array:
name: OpticalSeries__data__Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
IndexSeries__data:
name: IndexSeries__data
description: Index of the image (using zero-indexing) in the linked Images object.
attributes:
conversion:
name: conversion
description: This field is unused by IndexSeries.
range: float32
resolution:
name: resolution
description: This field is unused by IndexSeries.
range: float32
offset:
name: offset
description: This field is unused by IndexSeries.
range: float32
unit:
name: unit
description: This field is unused by IndexSeries and has the value N/A.
range: text
data:
name: data
description: Index of the image (using zero-indexing) in the linked Images
object.
multivalued: true
range: uint32
required: true

View file

@ -2,7 +2,10 @@ name: core.nwb.image
id: core.nwb.image
imports:
- core.nwb.base
- core.nwb.device
- nwb.language
- core.nwb.image.include
- core.nwb.image
default_prefix: core.nwb.image/
classes:
GrayscaleImage:
@ -12,19 +15,7 @@ classes:
attributes:
array:
name: array
range: GrayscaleImage_Array
GrayscaleImage_Array:
name: GrayscaleImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
range: GrayscaleImage__Array
RGBImage:
name: RGBImage
description: A color image.
@ -32,25 +23,7 @@ classes:
attributes:
array:
name: array
range: RGBImage_Array
RGBImage_Array:
name: RGBImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
range: RGBImage__Array
RGBAImage:
name: RGBAImage
description: A color image with transparency.
@ -58,25 +31,7 @@ classes:
attributes:
array:
name: array
range: RGBAImage_Array
RGBAImage_Array:
name: RGBAImage_Array
is_a: Arraylike
attributes:
x:
name: x
range: numeric
required: false
y:
name: y
range: numeric
required: false
r, g, b, a:
name: r, g, b, a
range: numeric
required: false
minimum_cardinality: 4
maximum_cardinality: 4
range: RGBAImage__Array
ImageSeries:
name: ImageSeries
description: General image data that is common between acquisition and stimulus
@ -92,13 +47,13 @@ classes:
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
multivalued: false
range: ImageSeries_data
range: ImageSeries__data
required: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
multivalued: false
range: ImageSeries_dimension
range: ImageSeries__dimension
required: false
external_file:
name: external_file
@ -108,7 +63,7 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
multivalued: false
range: ImageSeries_external_file
range: ImageSeries__external_file
required: false
format:
name: format
@ -117,90 +72,8 @@ classes:
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
multivalued: false
range: ImageSeries_format
required: false
ImageSeries_data:
name: ImageSeries_data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
attributes:
array:
name: array
range: ImageSeries_data_Array
ImageSeries_data_Array:
name: ImageSeries_data_Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
z:
name: z
range: numeric
required: false
ImageSeries_dimension:
name: ImageSeries_dimension
description: Number of pixels on x, y, (and z) axes.
attributes:
array:
name: array
range: ImageSeries_dimension_Array
ImageSeries_dimension_Array:
name: ImageSeries_dimension_Array
is_a: Arraylike
attributes:
rank:
name: rank
range: int32
required: true
ImageSeries_external_file:
name: ImageSeries_external_file
description: Paths to one or more external file(s). The field is only present
if format='external'. This is only relevant if the image series is stored in
the file system as one or more image file(s). This field should NOT be used
if the image is stored in another NWB file and that file is linked to this file.
attributes:
starting_frame:
name: starting_frame
description: Each external image may contain one or more consecutive frames
of the full ImageSeries. This attribute serves as an index to indicate which
frames each file contains, to facilitate random access. The 'starting_frame'
attribute, hence, contains a list of frame numbers within the full ImageSeries
of the first frame of each file listed in the parent 'external_file' dataset.
Zero-based indexing is used (hence, the first element will always be zero).
For example, if the 'external_file' dataset has three paths to files and
the first file has 5 frames, the second file has 10 frames, and the third
file has 20 frames, then this attribute will have values [0, 5, 15]. If
there is a single external file that holds all of the frames of the ImageSeries
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
array:
name: array
range: ImageSeries_external_file_Array
ImageSeries_external_file_Array:
name: ImageSeries_external_file_Array
is_a: Arraylike
attributes:
num_files:
name: num_files
range: text
required: true
ImageSeries_format:
name: ImageSeries_format
description: Format of image. If this is 'external', then the attribute 'external_file'
contains the path information to the image files. If this is 'raw', then the
raw (single-channel) binary data is stored in the 'data' dataset. If this attribute
is not present, then the default format='raw' case is assumed.
required: false
ImageMaskSeries:
name: ImageMaskSeries
description: An alpha mask that is applied to a presented visual stimulus. The
@ -222,86 +95,27 @@ classes:
name: distance
description: Distance from camera/monitor to target/eye.
multivalued: false
range: OpticalSeries_distance
range: float32
required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
multivalued: false
range: OpticalSeries_field_of_view
range: OpticalSeries__field_of_view
required: false
data:
name: data
description: Images presented to subject, either grayscale or RGB
multivalued: false
range: OpticalSeries_data
range: OpticalSeries__data
required: true
orientation:
name: orientation
description: Description of image relative to some reference frame (e.g.,
which way is up). Must also specify frame of reference.
multivalued: false
range: OpticalSeries_orientation
range: text
required: false
OpticalSeries_distance:
name: OpticalSeries_distance
description: Distance from camera/monitor to target/eye.
OpticalSeries_field_of_view:
name: OpticalSeries_field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: OpticalSeries_field_of_view_Array
OpticalSeries_field_of_view_Array:
name: OpticalSeries_field_of_view_Array
is_a: Arraylike
attributes:
width, height:
name: width, height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width, height, depth:
name: width, height, depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
OpticalSeries_data:
name: OpticalSeries_data
description: Images presented to subject, either grayscale or RGB
attributes:
array:
name: array
range: OpticalSeries_data_Array
OpticalSeries_data_Array:
name: OpticalSeries_data_Array
is_a: Arraylike
attributes:
frame:
name: frame
range: numeric
required: true
x:
name: x
range: numeric
required: true
y:
name: y
range: numeric
required: true
r, g, b:
name: r, g, b
range: numeric
required: false
minimum_cardinality: 3
maximum_cardinality: 3
OpticalSeries_orientation:
name: OpticalSeries_orientation
description: Description of image relative to some reference frame (e.g., which
way is up). Must also specify frame of reference.
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
@ -318,36 +132,5 @@ classes:
description: Index of the image (using zero-indexing) in the linked Images
object.
multivalued: false
range: IndexSeries_data
required: true
IndexSeries_data:
name: IndexSeries_data
description: Index of the image (using zero-indexing) in the linked Images object.
attributes:
conversion:
name: conversion
description: This field is unused by IndexSeries.
range: float32
resolution:
name: resolution
description: This field is unused by IndexSeries.
range: float32
offset:
name: offset
description: This field is unused by IndexSeries.
range: float32
unit:
name: unit
description: This field is unused by IndexSeries and has the value N/A.
range: text
array:
name: array
range: IndexSeries_data_Array
IndexSeries_data_Array:
name: IndexSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: uint32
range: IndexSeries__data
required: true

View file

@ -0,0 +1,277 @@
name: core.nwb.misc.include
id: core.nwb.misc.include
imports:
- core.nwb.base
- hdmf-common.table
- core.nwb.ecephys
- nwb.language
- core.nwb.misc.include
- core.nwb.misc
default_prefix: core.nwb.misc.include/
classes:
AbstractFeatureSeries__data:
name: AbstractFeatureSeries__data
description: Values of each feature at each time.
attributes:
unit:
name: unit
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
range: text
array:
name: array
range: AbstractFeatureSeries__data__Array
AbstractFeatureSeries__data__Array:
name: AbstractFeatureSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_features:
name: num_features
range: numeric
required: false
AbstractFeatureSeries__feature_units:
name: AbstractFeatureSeries__feature_units
description: Units of each feature.
attributes:
feature_units:
name: feature_units
description: Units of each feature.
multivalued: true
range: text
required: false
AbstractFeatureSeries__features:
name: AbstractFeatureSeries__features
description: Description of the features represented in TimeSeries::data.
attributes:
features:
name: features
description: Description of the features represented in TimeSeries::data.
multivalued: true
range: text
required: true
AnnotationSeries__data:
name: AnnotationSeries__data
description: Annotations made during an experiment.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
data:
name: data
description: Annotations made during an experiment.
multivalued: true
range: text
required: true
IntervalSeries__data:
name: IntervalSeries__data
description: Use values >0 if interval started, <0 if interval ended.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
multivalued: true
range: int8
required: true
DecompositionSeries__data:
name: DecompositionSeries__data
description: Data decomposed into frequency bands.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
array:
name: array
range: DecompositionSeries__data__Array
DecompositionSeries__data__Array:
name: DecompositionSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: false
num_channels:
name: num_channels
range: numeric
required: false
num_bands:
name: num_bands
range: numeric
required: false
DecompositionSeries__source_channels:
name: DecompositionSeries__source_channels
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
is_a: DynamicTableRegion
DecompositionSeries__bands:
name: DecompositionSeries__bands
description: Table for describing the bands that this series was generated from.
There should be one row in this table for each band.
is_a: DynamicTable
attributes:
band_name:
name: band_name
description: Name of the band, e.g. theta.
multivalued: true
range: text
band_limits:
name: band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
multivalued: false
range: DecompositionSeries__bands__band_limits
required: true
band_mean:
name: band_mean
description: The mean Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries__bands__band_mean
required: true
band_stdev:
name: band_stdev
description: The standard deviation of Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries__bands__band_stdev
required: true
DecompositionSeries__bands__band_limits:
name: DecompositionSeries__bands__band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
is_a: VectorData
DecompositionSeries__bands__band_mean:
name: DecompositionSeries__bands__band_mean
description: The mean Gaussian filters, in Hz.
is_a: VectorData
DecompositionSeries__bands__band_stdev:
name: DecompositionSeries__bands__band_stdev
description: The standard deviation of Gaussian filters, in Hz.
is_a: VectorData
Units__spike_times_index:
name: Units__spike_times_index
description: Index into the spike_times dataset.
is_a: VectorIndex
Units__spike_times:
name: Units__spike_times
description: Spike times for each unit in seconds.
is_a: VectorData
attributes:
resolution:
name: resolution
description: The smallest possible difference between two spike times. Usually
1 divided by the acquisition sampling rate from which spike times were extracted,
but could be larger if the acquisition time series was downsampled or smaller
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
Units__obs_intervals_index:
name: Units__obs_intervals_index
description: Index into the obs_intervals dataset.
is_a: VectorIndex
Units__obs_intervals:
name: Units__obs_intervals
description: Observation intervals for each unit.
is_a: VectorData
Units__electrodes_index:
name: Units__electrodes_index
description: Index into electrodes.
is_a: VectorIndex
Units__electrodes:
name: Units__electrodes
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
is_a: DynamicTableRegion
Units__waveform_mean:
name: Units__waveform_mean
description: Spike waveform mean for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
Units__waveform_sd:
name: Units__waveform_sd
description: Spike waveform standard deviation for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
Units__waveforms:
name: Units__waveforms
description: Individual waveforms for each spike on each electrode. This is a
doubly indexed column. The 'waveforms_index' column indexes which waveforms
in this column belong to the same spike event for a given unit, where each waveform
was recorded from a different electrode. The 'waveforms_index_index' column
indexes the 'waveforms_index' column to indicate which spike events belong to
a given unit. For example, if the 'waveforms_index_index' column has values
[2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
column correspond to the 3 spike events of the second unit, and the next 1 element
of the 'waveforms_index' column corresponds to the 1 spike event of the third
unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
that were recorded from 3 different electrodes for the first spike time of the
first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
for a graphical representation of this example. When there is only one electrode
for each unit (i.e., each spike time is associated with a single waveform),
then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
the number of spike events. The number of electrodes for each spike event should
be the same within a given unit. The 'electrodes' column should be used to indicate
which electrodes are associated with each unit, and the order of the waveforms
within a given unit x spike event should be in the same order as the electrodes
referenced in the 'electrodes' column of this table. The number of samples for
each waveform must be the same.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
Units__waveforms_index:
name: Units__waveforms_index
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
is_a: VectorIndex
Units__waveforms_index_index:
name: Units__waveforms_index_index
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
is_a: VectorIndex

View file

@ -3,7 +3,10 @@ id: core.nwb.misc
imports:
- core.nwb.base
- hdmf-common.table
- core.nwb.ecephys
- nwb.language
- core.nwb.misc.include
- core.nwb.misc
default_prefix: core.nwb.misc/
classes:
AbstractFeatureSeries:
@ -23,74 +26,19 @@ classes:
name: data
description: Values of each feature at each time.
multivalued: false
range: AbstractFeatureSeries_data
range: AbstractFeatureSeries__data
required: true
feature_units:
name: feature_units
description: Units of each feature.
multivalued: false
range: AbstractFeatureSeries_feature_units
range: AbstractFeatureSeries__feature_units
required: false
features:
name: features
description: Description of the features represented in TimeSeries::data.
multivalued: false
range: AbstractFeatureSeries_features
required: true
AbstractFeatureSeries_data:
name: AbstractFeatureSeries_data
description: Values of each feature at each time.
attributes:
unit:
name: unit
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
range: text
array:
name: array
range: AbstractFeatureSeries_data_Array
AbstractFeatureSeries_data_Array:
name: AbstractFeatureSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_features:
name: num_features
range: numeric
required: false
AbstractFeatureSeries_feature_units:
name: AbstractFeatureSeries_feature_units
description: Units of each feature.
attributes:
array:
name: array
range: AbstractFeatureSeries_feature_units_Array
AbstractFeatureSeries_feature_units_Array:
name: AbstractFeatureSeries_feature_units_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
required: true
AbstractFeatureSeries_features:
name: AbstractFeatureSeries_features
description: Description of the features represented in TimeSeries::data.
attributes:
array:
name: array
range: AbstractFeatureSeries_features_Array
AbstractFeatureSeries_features_Array:
name: AbstractFeatureSeries_features_Array
is_a: Arraylike
attributes:
num_features:
name: num_features
range: text
range: AbstractFeatureSeries__features
required: true
AnnotationSeries:
name: AnnotationSeries
@ -104,32 +52,7 @@ classes:
name: data
description: Annotations made during an experiment.
multivalued: false
range: AnnotationSeries_data
required: true
AnnotationSeries_data:
name: AnnotationSeries_data
description: Annotations made during an experiment.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
array:
name: array
range: AnnotationSeries_data_Array
AnnotationSeries_data_Array:
name: AnnotationSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: text
range: AnnotationSeries__data
required: true
IntervalSeries:
name: IntervalSeries
@ -146,32 +69,7 @@ classes:
name: data
description: Use values >0 if interval started, <0 if interval ended.
multivalued: false
range: IntervalSeries_data
required: true
IntervalSeries_data:
name: IntervalSeries_data
description: Use values >0 if interval started, <0 if interval ended.
attributes:
resolution:
name: resolution
description: Smallest meaningful difference between values in data. Annotations
have no units, so the value is fixed to -1.0.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. Annotations
have no units, so the value is fixed to 'n/a'.
range: text
array:
name: array
range: IntervalSeries_data_Array
IntervalSeries_data_Array:
name: IntervalSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: int8
range: IntervalSeries__data
required: true
DecompositionSeries:
name: DecompositionSeries
@ -182,154 +80,27 @@ classes:
name: data
description: Data decomposed into frequency bands.
multivalued: false
range: DecompositionSeries_data
range: DecompositionSeries__data
required: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
multivalued: false
range: DecompositionSeries_metric
range: text
required: true
source_channels:
name: source_channels
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
multivalued: false
range: DecompositionSeries_source_channels
range: DecompositionSeries__source_channels
required: false
bands:
name: bands
description: Table for describing the bands that this series was generated
from. There should be one row in this table for each band.
multivalued: false
range: DecompositionSeries_bands
required: true
DecompositionSeries_data:
name: DecompositionSeries_data
description: Data decomposed into frequency bands.
attributes:
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
array:
name: array
range: DecompositionSeries_data_Array
DecompositionSeries_data_Array:
name: DecompositionSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: false
num_channels:
name: num_channels
range: numeric
required: false
num_bands:
name: num_bands
range: numeric
required: false
DecompositionSeries_metric:
name: DecompositionSeries_metric
description: The metric used, e.g. phase, amplitude, power.
DecompositionSeries_source_channels:
name: DecompositionSeries_source_channels
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
is_a: DynamicTableRegion
DecompositionSeries_bands:
name: DecompositionSeries_bands
description: Table for describing the bands that this series was generated from.
There should be one row in this table for each band.
is_a: DynamicTable
attributes:
band_name:
name: band_name
description: Name of the band, e.g. theta.
multivalued: false
range: DecompositionSeries_bands_band_name
required: true
band_limits:
name: band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
multivalued: false
range: DecompositionSeries_bands_band_limits
required: true
band_mean:
name: band_mean
description: The mean Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries_bands_band_mean
required: true
band_stdev:
name: band_stdev
description: The standard deviation of Gaussian filters, in Hz.
multivalued: false
range: DecompositionSeries_bands_band_stdev
required: true
DecompositionSeries_bands_band_name:
name: DecompositionSeries_bands_band_name
description: Name of the band, e.g. theta.
is_a: VectorData
DecompositionSeries_bands_band_limits:
name: DecompositionSeries_bands_band_limits
description: Low and high limit of each band in Hz. If it is a Gaussian filter,
use 2 SD on either side of the center.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_limits_Array
DecompositionSeries_bands_band_limits_Array:
name: DecompositionSeries_bands_band_limits_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
required: false
low, high:
name: low, high
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
DecompositionSeries_bands_band_mean:
name: DecompositionSeries_bands_band_mean
description: The mean Gaussian filters, in Hz.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_mean_Array
DecompositionSeries_bands_band_mean_Array:
name: DecompositionSeries_bands_band_mean_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
required: true
DecompositionSeries_bands_band_stdev:
name: DecompositionSeries_bands_band_stdev
description: The standard deviation of Gaussian filters, in Hz.
is_a: VectorData
attributes:
array:
name: array
range: DecompositionSeries_bands_band_stdev_Array
DecompositionSeries_bands_band_stdev_Array:
name: DecompositionSeries_bands_band_stdev_Array
is_a: Arraylike
attributes:
num_bands:
name: num_bands
range: float32
range: DecompositionSeries__bands
required: true
Units:
name: Units
@ -341,55 +112,54 @@ classes:
name: spike_times_index
description: Index into the spike_times dataset.
multivalued: false
range: Units_spike_times_index
range: Units__spike_times_index
required: false
spike_times:
name: spike_times
description: Spike times for each unit in seconds.
multivalued: false
range: Units_spike_times
range: Units__spike_times
required: false
obs_intervals_index:
name: obs_intervals_index
description: Index into the obs_intervals dataset.
multivalued: false
range: Units_obs_intervals_index
range: Units__obs_intervals_index
required: false
obs_intervals:
name: obs_intervals
description: Observation intervals for each unit.
multivalued: false
range: Units_obs_intervals
range: Units__obs_intervals
required: false
electrodes_index:
name: electrodes_index
description: Index into electrodes.
multivalued: false
range: Units_electrodes_index
range: Units__electrodes_index
required: false
electrodes:
name: electrodes
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
multivalued: false
range: Units_electrodes
range: Units__electrodes
required: false
electrode_group:
name: electrode_group
description: Electrode group that each spike unit came from.
multivalued: false
range: Units_electrode_group
required: false
multivalued: true
range: ElectrodeGroup
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
multivalued: false
range: Units_waveform_mean
range: Units__waveform_mean
required: false
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
multivalued: false
range: Units_waveform_sd
range: Units__waveform_sd
required: false
waveforms:
name: waveforms
@ -417,198 +187,19 @@ classes:
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
multivalued: false
range: Units_waveforms
range: Units__waveforms
required: false
waveforms_index:
name: waveforms_index
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
multivalued: false
range: Units_waveforms_index
range: Units__waveforms_index
required: false
waveforms_index_index:
name: waveforms_index_index
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
multivalued: false
range: Units_waveforms_index_index
range: Units__waveforms_index_index
required: false
Units_spike_times_index:
name: Units_spike_times_index
description: Index into the spike_times dataset.
is_a: VectorIndex
Units_spike_times:
name: Units_spike_times
description: Spike times for each unit in seconds.
is_a: VectorData
attributes:
resolution:
name: resolution
description: The smallest possible difference between two spike times. Usually
1 divided by the acquisition sampling rate from which spike times were extracted,
but could be larger if the acquisition time series was downsampled or smaller
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
Units_obs_intervals_index:
name: Units_obs_intervals_index
description: Index into the obs_intervals dataset.
is_a: VectorIndex
Units_obs_intervals:
name: Units_obs_intervals
description: Observation intervals for each unit.
is_a: VectorData
attributes:
array:
name: array
range: Units_obs_intervals_Array
Units_obs_intervals_Array:
name: Units_obs_intervals_Array
is_a: Arraylike
attributes:
num_intervals:
name: num_intervals
range: float64
required: false
start|end:
name: start|end
range: float64
required: false
minimum_cardinality: 2
maximum_cardinality: 2
Units_electrodes_index:
name: Units_electrodes_index
description: Index into electrodes.
is_a: VectorIndex
Units_electrodes:
name: Units_electrodes
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
is_a: DynamicTableRegion
Units_electrode_group:
name: Units_electrode_group
description: Electrode group that each spike unit came from.
is_a: VectorData
Units_waveform_mean:
name: Units_waveform_mean
description: Spike waveform mean for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveform_mean_Array
Units_waveform_mean_Array:
name: Units_waveform_mean_Array
is_a: Arraylike
attributes:
num_units:
name: num_units
range: float32
required: true
num_samples:
name: num_samples
range: float32
required: true
num_electrodes:
name: num_electrodes
range: float32
required: false
Units_waveform_sd:
name: Units_waveform_sd
description: Spike waveform standard deviation for each spike unit.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveform_sd_Array
Units_waveform_sd_Array:
name: Units_waveform_sd_Array
is_a: Arraylike
attributes:
num_units:
name: num_units
range: float32
required: true
num_samples:
name: num_samples
range: float32
required: true
num_electrodes:
name: num_electrodes
range: float32
required: false
Units_waveforms:
name: Units_waveforms
description: Individual waveforms for each spike on each electrode. This is a
doubly indexed column. The 'waveforms_index' column indexes which waveforms
in this column belong to the same spike event for a given unit, where each waveform
was recorded from a different electrode. The 'waveforms_index_index' column
indexes the 'waveforms_index' column to indicate which spike events belong to
a given unit. For example, if the 'waveforms_index_index' column has values
[2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
column correspond to the 3 spike events of the second unit, and the next 1 element
of the 'waveforms_index' column corresponds to the 1 spike event of the third
unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
that were recorded from 3 different electrodes for the first spike time of the
first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
for a graphical representation of this example. When there is only one electrode
for each unit (i.e., each spike time is associated with a single waveform),
then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
the number of spike events. The number of electrodes for each spike event should
be the same within a given unit. The 'electrodes' column should be used to indicate
which electrodes are associated with each unit, and the order of the waveforms
within a given unit x spike event should be in the same order as the electrodes
referenced in the 'electrodes' column of this table. The number of samples for
each waveform must be the same.
is_a: VectorData
attributes:
sampling_rate:
name: sampling_rate
description: Sampling rate, in hertz.
range: float32
unit:
name: unit
description: Unit of measurement. This value is fixed to 'volts'.
range: text
array:
name: array
range: Units_waveforms_Array
Units_waveforms_Array:
name: Units_waveforms_Array
is_a: Arraylike
attributes:
num_waveforms:
name: num_waveforms
range: numeric
required: false
num_samples:
name: num_samples
range: numeric
required: false
Units_waveforms_index:
name: Units_waveforms_index
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
is_a: VectorIndex
Units_waveforms_index_index:
name: Units_waveforms_index_index
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
is_a: VectorIndex

View file

@ -0,0 +1,24 @@
name: core.nwb.ogen.include
id: core.nwb.ogen.include
imports:
- core.nwb.base
- core.nwb.device
- nwb.language
- core.nwb.ogen.include
- core.nwb.ogen
default_prefix: core.nwb.ogen.include/
classes:
OptogeneticSeries__data:
name: OptogeneticSeries__data
description: Applied power for optogenetic stimulus, in watts.
attributes:
unit:
name: unit
description: Unit of measurement for data, which is fixed to 'watts'.
range: text
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
multivalued: true
range: numeric
required: true

View file

@ -2,7 +2,10 @@ name: core.nwb.ogen
id: core.nwb.ogen
imports:
- core.nwb.base
- core.nwb.device
- nwb.language
- core.nwb.ogen.include
- core.nwb.ogen
default_prefix: core.nwb.ogen/
classes:
OptogeneticSeries:
@ -14,26 +17,7 @@ classes:
name: data
description: Applied power for optogenetic stimulus, in watts.
multivalued: false
range: OptogeneticSeries_data
required: true
OptogeneticSeries_data:
name: OptogeneticSeries_data
description: Applied power for optogenetic stimulus, in watts.
attributes:
unit:
name: unit
description: Unit of measurement for data, which is fixed to 'watts'.
range: text
array:
name: array
range: OptogeneticSeries_data_Array
OptogeneticSeries_data_Array:
name: OptogeneticSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
range: OptogeneticSeries__data
required: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@ -44,13 +28,13 @@ classes:
name: description
description: Description of stimulation site.
multivalued: false
range: OptogeneticStimulusSite_description
range: text
required: true
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
multivalued: false
range: OptogeneticStimulusSite_excitation_lambda
range: float32
required: true
location:
name: location
@ -58,16 +42,5 @@ classes:
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: OptogeneticStimulusSite_location
range: text
required: true
OptogeneticStimulusSite_description:
name: OptogeneticStimulusSite_description
description: Description of stimulation site.
OptogeneticStimulusSite_excitation_lambda:
name: OptogeneticStimulusSite_excitation_lambda
description: Excitation wavelength, in nm.
OptogeneticStimulusSite_location:
name: OptogeneticStimulusSite_location
description: Location of the stimulation site. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard
atlas names for anatomical regions when possible.

View file

@ -0,0 +1,197 @@
name: core.nwb.ophys.include
id: core.nwb.ophys.include
imports:
- core.nwb.image
- core.nwb.base
- hdmf-common.table
- core.nwb.device
- nwb.language
- core.nwb.ophys.include
- core.nwb.ophys
default_prefix: core.nwb.ophys.include/
classes:
TwoPhotonSeries__field_of_view:
name: TwoPhotonSeries__field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: TwoPhotonSeries__field_of_view__Array
TwoPhotonSeries__field_of_view__Array:
name: TwoPhotonSeries__field_of_view__Array
is_a: Arraylike
attributes:
width|height:
name: width|height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width|height|depth:
name: width|height|depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
RoiResponseSeries__data:
name: RoiResponseSeries__data
description: Signals from ROIs.
attributes:
array:
name: array
range: RoiResponseSeries__data__Array
RoiResponseSeries__data__Array:
name: RoiResponseSeries__data__Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_ROIs:
name: num_ROIs
range: numeric
required: false
RoiResponseSeries__rois:
name: RoiResponseSeries__rois
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
is_a: DynamicTableRegion
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
description: ROI masks for each ROI. Each image mask is the size of the original
imaging plane (or volume) and members of the ROI are finite non-zero.
is_a: VectorData
PlaneSegmentation__pixel_mask_index:
name: PlaneSegmentation__pixel_mask_index
description: Index into pixel_mask.
is_a: VectorIndex
PlaneSegmentation__voxel_mask_index:
name: PlaneSegmentation__voxel_mask_index
description: Index into voxel_mask.
is_a: VectorIndex
PlaneSegmentation__reference_images:
name: PlaneSegmentation__reference_images
description: Image stacks that the segmentation masks apply to.
attributes:
ImageSeries:
name: ImageSeries
description: One or more image stacks that the masks apply to (can be one-element
stack).
multivalued: true
range: ImageSeries
required: false
ImagingPlane__manifold:
name: ImagingPlane__manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents the
position of the pixel relative to the defined coordinate space. Deprecated in
favor of origin_coords and grid_spacing.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as pixels from
x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
the 'conversion' multiplier to get from raw data acquisition pixel units
to meters is 2/1000.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'.
range: text
array:
name: array
range: ImagingPlane__manifold__Array
ImagingPlane__manifold__Array:
name: ImagingPlane__manifold__Array
is_a: Arraylike
attributes:
height:
name: height
range: float32
required: true
width:
name: width
range: float32
required: true
x, y, z:
name: x, y, z
range: float32
required: true
minimum_cardinality: 3
maximum_cardinality: 3
depth:
name: depth
range: float32
required: false
ImagingPlane__origin_coords:
name: ImagingPlane__origin_coords
description: Physical location of the first element of the imaging plane (0, 0)
for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
physical location is relative to (e.g., bregma).
attributes:
unit:
name: unit
description: Measurement units for origin_coords. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane__origin_coords__Array
ImagingPlane__origin_coords__Array:
name: ImagingPlane__origin_coords__Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
ImagingPlane__grid_spacing:
name: ImagingPlane__grid_spacing
description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
to interpret the grid.
attributes:
unit:
name: unit
description: Measurement units for grid_spacing. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane__grid_spacing__Array
ImagingPlane__grid_spacing__Array:
name: ImagingPlane__grid_spacing__Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
CorrectedImageStack__corrected:
name: CorrectedImageStack__corrected
description: Image stack with frames shifted to the common coordinates.
is_a: ImageSeries
CorrectedImageStack__xy_translation:
name: CorrectedImageStack__xy_translation
description: Stores the x,y delta necessary to align each frame to the common
coordinates, for example, to align each frame to a reference image.
is_a: TimeSeries

View file

@ -4,7 +4,10 @@ imports:
- core.nwb.image
- core.nwb.base
- hdmf-common.table
- core.nwb.device
- nwb.language
- core.nwb.ophys.include
- core.nwb.ophys
default_prefix: core.nwb.ophys/
classes:
OnePhotonSeries:
@ -57,31 +60,8 @@ classes:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
multivalued: false
range: TwoPhotonSeries_field_of_view
range: TwoPhotonSeries__field_of_view
required: false
TwoPhotonSeries_field_of_view:
name: TwoPhotonSeries_field_of_view
description: Width, height and depth of image, or imaged area, in meters.
attributes:
array:
name: array
range: TwoPhotonSeries_field_of_view_Array
TwoPhotonSeries_field_of_view_Array:
name: TwoPhotonSeries_field_of_view_Array
is_a: Arraylike
attributes:
width|height:
name: width|height
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
width|height|depth:
name: width|height|depth
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
RoiResponseSeries:
name: RoiResponseSeries
description: ROI responses over an imaging plane. The first dimension represents
@ -92,39 +72,15 @@ classes:
name: data
description: Signals from ROIs.
multivalued: false
range: RoiResponseSeries_data
range: RoiResponseSeries__data
required: true
rois:
name: rois
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
multivalued: false
range: RoiResponseSeries_rois
range: RoiResponseSeries__rois
required: true
RoiResponseSeries_data:
name: RoiResponseSeries_data
description: Signals from ROIs.
attributes:
array:
name: array
range: RoiResponseSeries_data_Array
RoiResponseSeries_data_Array:
name: RoiResponseSeries_data_Array
is_a: Arraylike
attributes:
num_times:
name: num_times
range: numeric
required: true
num_ROIs:
name: num_ROIs
range: numeric
required: false
RoiResponseSeries_rois:
name: RoiResponseSeries_rois
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
is_a: DynamicTableRegion
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@ -179,102 +135,40 @@ classes:
description: ROI masks for each ROI. Each image mask is the size of the original
imaging plane (or volume) and members of the ROI are finite non-zero.
multivalued: false
range: PlaneSegmentation_image_mask
range: PlaneSegmentation__image_mask
required: false
pixel_mask_index:
name: pixel_mask_index
description: Index into pixel_mask.
multivalued: false
range: PlaneSegmentation_pixel_mask_index
range: PlaneSegmentation__pixel_mask_index
required: false
pixel_mask:
name: pixel_mask
description: 'Pixel masks for each ROI: a list of indices and weights for
the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
multivalued: false
range: PlaneSegmentation_pixel_mask
required: false
multivalued: true
range: AnyType
voxel_mask_index:
name: voxel_mask_index
description: Index into voxel_mask.
multivalued: false
range: PlaneSegmentation_voxel_mask_index
range: PlaneSegmentation__voxel_mask_index
required: false
voxel_mask:
name: voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for
the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
multivalued: false
range: PlaneSegmentation_voxel_mask
required: false
multivalued: true
range: AnyType
reference_images:
name: reference_images
description: Image stacks that the segmentation masks apply to.
multivalued: false
range: PlaneSegmentation_reference_images
range: PlaneSegmentation__reference_images
required: true
PlaneSegmentation_image_mask:
name: PlaneSegmentation_image_mask
description: ROI masks for each ROI. Each image mask is the size of the original
imaging plane (or volume) and members of the ROI are finite non-zero.
is_a: VectorData
attributes:
array:
name: array
range: PlaneSegmentation_image_mask_Array
PlaneSegmentation_image_mask_Array:
name: PlaneSegmentation_image_mask_Array
is_a: Arraylike
attributes:
num_roi:
name: num_roi
range: AnyType
required: true
num_x:
name: num_x
range: AnyType
required: true
num_y:
name: num_y
range: AnyType
required: true
num_z:
name: num_z
range: AnyType
required: false
PlaneSegmentation_pixel_mask_index:
name: PlaneSegmentation_pixel_mask_index
description: Index into pixel_mask.
is_a: VectorIndex
PlaneSegmentation_pixel_mask:
name: PlaneSegmentation_pixel_mask
description: 'Pixel masks for each ROI: a list of indices and weights for the
ROI. Pixel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
is_a: VectorData
PlaneSegmentation_voxel_mask_index:
name: PlaneSegmentation_voxel_mask_index
description: Index into voxel_mask.
is_a: VectorIndex
PlaneSegmentation_voxel_mask:
name: PlaneSegmentation_voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
is_a: VectorData
PlaneSegmentation_reference_images:
name: PlaneSegmentation_reference_images
description: Image stacks that the segmentation masks apply to.
attributes:
ImageSeries:
name: ImageSeries
description: One or more image stacks that the masks apply to (can be one-element
stack).
multivalued: true
range: ImageSeries
required: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@ -284,26 +178,26 @@ classes:
name: description
description: Description of the imaging plane.
multivalued: false
range: ImagingPlane_description
range: text
required: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
multivalued: false
range: ImagingPlane_excitation_lambda
range: float32
required: true
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
multivalued: false
range: ImagingPlane_imaging_rate
range: float32
required: false
indicator:
name: indicator
description: Calcium indicator.
multivalued: false
range: ImagingPlane_indicator
range: text
required: true
location:
name: location
@ -311,7 +205,7 @@ classes:
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
multivalued: false
range: ImagingPlane_location
range: text
required: true
manifold:
name: manifold
@ -319,7 +213,7 @@ classes:
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
multivalued: false
range: ImagingPlane_manifold
range: ImagingPlane__manifold
required: false
origin_coords:
name: origin_coords
@ -327,7 +221,7 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
multivalued: false
range: ImagingPlane_origin_coords
range: ImagingPlane__origin_coords
required: false
grid_spacing:
name: grid_spacing
@ -335,7 +229,7 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
multivalued: false
range: ImagingPlane_grid_spacing
range: ImagingPlane__grid_spacing
required: false
reference_frame:
name: reference_frame
@ -357,7 +251,7 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
multivalued: false
range: ImagingPlane_reference_frame
range: text
required: false
OpticalChannel:
name: OpticalChannel
@ -365,147 +259,6 @@ classes:
multivalued: true
range: OpticalChannel
required: true
ImagingPlane_description:
name: ImagingPlane_description
description: Description of the imaging plane.
ImagingPlane_excitation_lambda:
name: ImagingPlane_excitation_lambda
description: Excitation wavelength, in nm.
ImagingPlane_imaging_rate:
name: ImagingPlane_imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
ImagingPlane_indicator:
name: ImagingPlane_indicator
description: Calcium indicator.
ImagingPlane_location:
name: ImagingPlane_location
description: Location of the imaging plane. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard
atlas names for anatomical regions when possible.
ImagingPlane_manifold:
name: ImagingPlane_manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents the
position of the pixel relative to the defined coordinate space. Deprecated in
favor of origin_coords and grid_spacing.
attributes:
conversion:
name: conversion
description: Scalar to multiply each element in data to convert it to the
specified 'unit'. If the data are stored in acquisition system units or
other units that require a conversion to be interpretable, multiply the
data by 'conversion' to convert the data to the specified 'unit'. e.g. if
the data acquisition system stores values in this object as pixels from
x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
the 'conversion' multiplier to get from raw data acquisition pixel units
to meters is 2/1000.
range: float32
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_manifold_Array
ImagingPlane_manifold_Array:
name: ImagingPlane_manifold_Array
is_a: Arraylike
attributes:
height:
name: height
range: float32
required: true
width:
name: width
range: float32
required: true
x, y, z:
name: x, y, z
range: float32
required: true
minimum_cardinality: 3
maximum_cardinality: 3
depth:
name: depth
range: float32
required: false
ImagingPlane_origin_coords:
name: ImagingPlane_origin_coords
description: Physical location of the first element of the imaging plane (0, 0)
for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
physical location is relative to (e.g., bregma).
attributes:
unit:
name: unit
description: Measurement units for origin_coords. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_origin_coords_Array
ImagingPlane_origin_coords_Array:
name: ImagingPlane_origin_coords_Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
ImagingPlane_grid_spacing:
name: ImagingPlane_grid_spacing
description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
to interpret the grid.
attributes:
unit:
name: unit
description: Measurement units for grid_spacing. The default value is 'meters'.
range: text
array:
name: array
range: ImagingPlane_grid_spacing_Array
ImagingPlane_grid_spacing_Array:
name: ImagingPlane_grid_spacing_Array
is_a: Arraylike
attributes:
x, y:
name: x, y
range: float32
required: false
minimum_cardinality: 2
maximum_cardinality: 2
x, y, z:
name: x, y, z
range: float32
required: false
minimum_cardinality: 3
maximum_cardinality: 3
ImagingPlane_reference_frame:
name: ImagingPlane_reference_frame
description: Describes reference frame of origin_coords and grid_spacing. For
example, this can be a text description of the anatomical location and orientation
of the grid defined by origin_coords and grid_spacing or the vectors needed
to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML).
This field is necessary to interpret origin_coords and grid_spacing. If origin_coords
and grid_spacing are not present, then this field is not required. For example,
if the microscope takes 10 x 10 x 2 images, where the first value of the data
matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma,
the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and
larger numbers in x means more anterior, larger numbers in y means more rightward,
and larger numbers in z means more ventral, then enter the following -- origin_coords
= (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin
coordinates are relative to bregma. First dimension corresponds to anterior-posterior
axis (larger index = more anterior). Second dimension corresponds to medial-lateral
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@ -515,20 +268,14 @@ classes:
name: description
description: Description or other notes about the channel.
multivalued: false
range: OpticalChannel_description
range: text
required: true
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
multivalued: false
range: OpticalChannel_emission_lambda
range: float32
required: true
OpticalChannel_description:
name: OpticalChannel_description
description: Description or other notes about the channel.
OpticalChannel_emission_lambda:
name: OpticalChannel_emission_lambda
description: Emission wavelength for channel, in nm.
MotionCorrection:
name: MotionCorrection
description: 'An image stack where all frames are shifted (registered) to a common
@ -551,21 +298,12 @@ classes:
name: corrected
description: Image stack with frames shifted to the common coordinates.
multivalued: false
range: CorrectedImageStack_corrected
range: CorrectedImageStack__corrected
required: true
xy_translation:
name: xy_translation
description: Stores the x,y delta necessary to align each frame to the common
coordinates, for example, to align each frame to a reference image.
multivalued: false
range: CorrectedImageStack_xy_translation
range: CorrectedImageStack__xy_translation
required: true
CorrectedImageStack_corrected:
name: CorrectedImageStack_corrected
description: Image stack with frames shifted to the common coordinates.
is_a: ImageSeries
CorrectedImageStack_xy_translation:
name: CorrectedImageStack_xy_translation
description: Stores the x,y delta necessary to align each frame to the common
coordinates, for example, to align each frame to a reference image.
is_a: TimeSeries

View file

@ -0,0 +1,262 @@
name: core.nwb.retinotopy.include
id: core.nwb.retinotopy.include
imports:
- core.nwb.base
- nwb.language
- core.nwb.retinotopy.include
- core.nwb.retinotopy
default_prefix: core.nwb.retinotopy.include/
classes:
ImagingRetinotopy__axis_1_phase_map:
name: ImagingRetinotopy__axis_1_phase_map
description: Phase response to stimulus on the first measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy__axis_1_phase_map__Array
ImagingRetinotopy__axis_1_phase_map__Array:
name: ImagingRetinotopy__axis_1_phase_map__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy__axis_1_power_map:
name: ImagingRetinotopy__axis_1_power_map
description: Power response on the first measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy__axis_1_power_map__Array
ImagingRetinotopy__axis_1_power_map__Array:
name: ImagingRetinotopy__axis_1_power_map__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy__axis_2_phase_map:
name: ImagingRetinotopy__axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy__axis_2_phase_map__Array
ImagingRetinotopy__axis_2_phase_map__Array:
name: ImagingRetinotopy__axis_2_phase_map__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy__axis_2_power_map:
name: ImagingRetinotopy__axis_2_power_map
description: Power response on the second measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy__axis_2_power_map__Array
ImagingRetinotopy__axis_2_power_map__Array:
name: ImagingRetinotopy__axis_2_power_map__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy__axis_descriptions:
name: ImagingRetinotopy__axis_descriptions
description: Two-element array describing the contents of the two response axis
fields. Description should be something like ['altitude', 'azimuth'] or '['radius',
'theta'].
attributes:
axis_descriptions:
name: axis_descriptions
description: Two-element array describing the contents of the two response
axis fields. Description should be something like ['altitude', 'azimuth']
or '['radius', 'theta'].
multivalued: true
range: text
required: true
ImagingRetinotopy__focal_depth_image:
name: ImagingRetinotopy__focal_depth_image
description: 'Gray-scale image taken with same settings/parameters (e.g., focal
depth, wavelength) as data collection. Array format: [rows][columns].'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy__focal_depth_image__Array
ImagingRetinotopy__focal_depth_image__Array:
name: ImagingRetinotopy__focal_depth_image__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false
ImagingRetinotopy__sign_map:
name: ImagingRetinotopy__sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
array:
name: array
range: ImagingRetinotopy__sign_map__Array
ImagingRetinotopy__sign_map__Array:
name: ImagingRetinotopy__sign_map__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy__vasculature_image:
name: ImagingRetinotopy__vasculature_image
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy__vasculature_image__Array
ImagingRetinotopy__vasculature_image__Array:
name: ImagingRetinotopy__vasculature_image__Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false

View file

@ -3,6 +3,8 @@ id: core.nwb.retinotopy
imports:
- core.nwb.base
- nwb.language
- core.nwb.retinotopy.include
- core.nwb.retinotopy
default_prefix: core.nwb.retinotopy/
classes:
ImagingRetinotopy:
@ -22,27 +24,27 @@ classes:
name: axis_1_phase_map
description: Phase response to stimulus on the first measured axis.
multivalued: false
range: ImagingRetinotopy_axis_1_phase_map
range: ImagingRetinotopy__axis_1_phase_map
required: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
multivalued: false
range: ImagingRetinotopy_axis_1_power_map
range: ImagingRetinotopy__axis_1_power_map
required: false
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
multivalued: false
range: ImagingRetinotopy_axis_2_phase_map
range: ImagingRetinotopy__axis_2_phase_map
required: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
multivalued: false
range: ImagingRetinotopy_axis_2_power_map
range: ImagingRetinotopy__axis_2_power_map
required: false
axis_descriptions:
name: axis_descriptions
@ -50,284 +52,26 @@ classes:
axis fields. Description should be something like ['altitude', 'azimuth']
or '['radius', 'theta'].
multivalued: false
range: ImagingRetinotopy_axis_descriptions
range: ImagingRetinotopy__axis_descriptions
required: true
focal_depth_image:
name: focal_depth_image
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
multivalued: false
range: ImagingRetinotopy_focal_depth_image
range: ImagingRetinotopy__focal_depth_image
required: false
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
multivalued: false
range: ImagingRetinotopy_sign_map
range: ImagingRetinotopy__sign_map
required: false
vasculature_image:
name: vasculature_image
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
multivalued: false
range: ImagingRetinotopy_vasculature_image
range: ImagingRetinotopy__vasculature_image
required: true
ImagingRetinotopy_axis_1_phase_map:
name: ImagingRetinotopy_axis_1_phase_map
description: Phase response to stimulus on the first measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_1_phase_map_Array
ImagingRetinotopy_axis_1_phase_map_Array:
name: ImagingRetinotopy_axis_1_phase_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_1_power_map:
name: ImagingRetinotopy_axis_1_power_map
description: Power response on the first measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_1_power_map_Array
ImagingRetinotopy_axis_1_power_map_Array:
name: ImagingRetinotopy_axis_1_power_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_2_phase_map:
name: ImagingRetinotopy_axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_2_phase_map_Array
ImagingRetinotopy_axis_2_phase_map_Array:
name: ImagingRetinotopy_axis_2_phase_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_2_power_map:
name: ImagingRetinotopy_axis_2_power_map
description: Power response on the second measured axis. Response is scaled so
0.0 is no power in the response and 1.0 is maximum relative power.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
array:
name: array
range: ImagingRetinotopy_axis_2_power_map_Array
ImagingRetinotopy_axis_2_power_map_Array:
name: ImagingRetinotopy_axis_2_power_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_axis_descriptions:
name: ImagingRetinotopy_axis_descriptions
description: Two-element array describing the contents of the two response axis
fields. Description should be something like ['altitude', 'azimuth'] or '['radius',
'theta'].
attributes:
array:
name: array
range: ImagingRetinotopy_axis_descriptions_Array
ImagingRetinotopy_axis_descriptions_Array:
name: ImagingRetinotopy_axis_descriptions_Array
is_a: Arraylike
attributes:
axis_1, axis_2:
name: axis_1, axis_2
range: text
required: true
minimum_cardinality: 2
maximum_cardinality: 2
ImagingRetinotopy_focal_depth_image:
name: ImagingRetinotopy_focal_depth_image
description: 'Gray-scale image taken with same settings/parameters (e.g., focal
depth, wavelength) as data collection. Array format: [rows][columns].'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy_focal_depth_image_Array
ImagingRetinotopy_focal_depth_image_Array:
name: ImagingRetinotopy_focal_depth_image_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false
ImagingRetinotopy_sign_map:
name: ImagingRetinotopy_sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
attributes:
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
array:
name: array
range: ImagingRetinotopy_sign_map_Array
ImagingRetinotopy_sign_map_Array:
name: ImagingRetinotopy_sign_map_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: float32
required: false
num_cols:
name: num_cols
range: float32
required: false
ImagingRetinotopy_vasculature_image:
name: ImagingRetinotopy_vasculature_image
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
attributes:
bits_per_pixel:
name: bits_per_pixel
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
array:
name: array
range: ImagingRetinotopy_vasculature_image_Array
ImagingRetinotopy_vasculature_image_Array:
name: ImagingRetinotopy_vasculature_image_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint16
required: false
num_cols:
name: num_cols
range: uint16
required: false

View file

@ -0,0 +1,7 @@
name: hdmf-common.base.include
id: hdmf-common.base.include
imports:
- nwb.language
- hdmf-common.base.include
- hdmf-common.base
default_prefix: hdmf-common.base.include/

View file

@ -2,6 +2,8 @@ name: hdmf-common.base
id: hdmf-common.base
imports:
- nwb.language
- hdmf-common.base.include
- hdmf-common.base
default_prefix: hdmf-common.base/
classes:
Data:

View file

@ -0,0 +1,39 @@
name: hdmf-common.sparse.include
id: hdmf-common.sparse.include
imports:
- hdmf-common.base
- nwb.language
- hdmf-common.sparse.include
- hdmf-common.sparse
default_prefix: hdmf-common.sparse.include/
classes:
CSRMatrix__indices:
name: CSRMatrix__indices
description: The column indices.
attributes:
indices:
name: indices
description: The column indices.
multivalued: true
range: uint
required: true
CSRMatrix__indptr:
name: CSRMatrix__indptr
description: The row index pointer.
attributes:
indptr:
name: indptr
description: The row index pointer.
multivalued: true
range: uint
required: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
data:
name: data
description: The non-zero values in the matrix.
multivalued: true
range: AnyType
required: true

View file

@ -3,6 +3,8 @@ id: hdmf-common.sparse
imports:
- hdmf-common.base
- nwb.language
- hdmf-common.sparse.include
- hdmf-common.sparse
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
@ -21,62 +23,17 @@ classes:
name: indices
description: The column indices.
multivalued: false
range: CSRMatrix_indices
range: CSRMatrix__indices
required: true
indptr:
name: indptr
description: The row index pointer.
multivalued: false
range: CSRMatrix_indptr
range: CSRMatrix__indptr
required: true
data:
name: data
description: The non-zero values in the matrix.
multivalued: false
range: CSRMatrix_data
required: true
CSRMatrix_indices:
name: CSRMatrix_indices
description: The column indices.
attributes:
array:
name: array
range: CSRMatrix_indices_Array
CSRMatrix_indices_Array:
name: CSRMatrix_indices_Array
is_a: Arraylike
attributes:
number of non-zero values:
name: number of non-zero values
range: uint
required: true
CSRMatrix_indptr:
name: CSRMatrix_indptr
description: The row index pointer.
attributes:
array:
name: array
range: CSRMatrix_indptr_Array
CSRMatrix_indptr_Array:
name: CSRMatrix_indptr_Array
is_a: Arraylike
attributes:
number of rows in the matrix + 1:
name: number of rows in the matrix + 1
range: uint
required: true
CSRMatrix_data:
name: CSRMatrix_data
description: The non-zero values in the matrix.
attributes:
array:
name: array
range: CSRMatrix_data_Array
CSRMatrix_data_Array:
name: CSRMatrix_data_Array
is_a: Arraylike
attributes:
number of non-zero values:
name: number of non-zero values
range: AnyType
range: CSRMatrix__data
required: true

View file

@ -0,0 +1,48 @@
name: hdmf-common.table.include
id: hdmf-common.table.include
imports:
- hdmf-common.base
- nwb.language
- hdmf-common.table.include
- hdmf-common.table
default_prefix: hdmf-common.table.include/
classes:
VectorData__Array:
name: VectorData__Array
is_a: Arraylike
attributes:
dim0:
name: dim0
range: AnyType
required: true
dim1:
name: dim1
range: AnyType
required: false
dim2:
name: dim2
range: AnyType
required: false
dim3:
name: dim3
range: AnyType
required: false
ElementIdentifiers__Array:
name: ElementIdentifiers__Array
is_a: Arraylike
attributes:
num_elements:
name: num_elements
range: int
required: true
DynamicTable__id:
name: DynamicTable__id
description: Array of unique identifiers for the rows of this dynamic table.
is_a: ElementIdentifiers
attributes:
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
multivalued: true
range: int
required: true

View file

@ -3,6 +3,8 @@ id: hdmf-common.table
imports:
- hdmf-common.base
- nwb.language
- hdmf-common.table.include
- hdmf-common.table
default_prefix: hdmf-common.table/
classes:
VectorData:
@ -23,27 +25,7 @@ classes:
range: text
array:
name: array
range: VectorData_Array
VectorData_Array:
name: VectorData_Array
is_a: Arraylike
attributes:
dim0:
name: dim0
range: AnyType
required: true
dim1:
name: dim1
range: AnyType
required: false
dim2:
name: dim2
range: AnyType
required: false
dim3:
name: dim3
range: AnyType
required: false
range: VectorData__Array
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
@ -57,17 +39,6 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
array:
name: array
range: VectorIndex_Array
VectorIndex_Array:
name: VectorIndex_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: uint8
required: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
@ -76,15 +47,7 @@ classes:
attributes:
array:
name: array
range: ElementIdentifiers_Array
ElementIdentifiers_Array:
name: ElementIdentifiers_Array
is_a: Arraylike
attributes:
num_elements:
name: num_elements
range: int
required: true
range: ElementIdentifiers__Array
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
@ -107,17 +70,6 @@ classes:
name: description
description: Description of what this table region points to.
range: text
array:
name: array
range: DynamicTableRegion_Array
DynamicTableRegion_Array:
name: DynamicTableRegion_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: int
required: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
@ -153,7 +105,7 @@ classes:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
multivalued: false
range: DynamicTable_id
range: DynamicTable__id
required: true
VectorData:
name: VectorData
@ -161,22 +113,6 @@ classes:
multivalued: true
range: VectorData
required: false
DynamicTable_id:
name: DynamicTable_id
description: Array of unique identifiers for the rows of this dynamic table.
is_a: ElementIdentifiers
attributes:
array:
name: array
range: DynamicTable_id_Array
DynamicTable_id_Array:
name: DynamicTable_id_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: int
required: true
AlignedDynamicTable:
name: AlignedDynamicTable
description: DynamicTable container that supports storing a collection of sub-tables.

View file

@ -0,0 +1,8 @@
name: hdmf-experimental.experimental.include
id: hdmf-experimental.experimental.include
imports:
- hdmf-common.table
- nwb.language
- hdmf-experimental.experimental.include
- hdmf-experimental.experimental
default_prefix: hdmf-experimental.experimental.include/

View file

@ -3,6 +3,8 @@ id: hdmf-experimental.experimental
imports:
- hdmf-common.table
- nwb.language
- hdmf-experimental.experimental.include
- hdmf-experimental.experimental
default_prefix: hdmf-experimental.experimental/
classes:
EnumData:

View file

@ -0,0 +1,79 @@
name: hdmf-experimental.resources.include
id: hdmf-experimental.resources.include
imports:
- hdmf-common.base
- nwb.language
- hdmf-experimental.resources.include
- hdmf-experimental.resources
default_prefix: hdmf-experimental.resources.include/
classes:
HERD__keys:
name: HERD__keys
description: A table for storing user terms that are used to refer to external
resources.
is_a: Data
attributes:
keys:
name: keys
description: A table for storing user terms that are used to refer to external
resources.
multivalued: true
range: AnyType
required: true
HERD__files:
name: HERD__files
description: A table for storing object ids of files used in external resources.
is_a: Data
attributes:
files:
name: files
description: A table for storing object ids of files used in external resources.
multivalued: true
range: AnyType
required: true
HERD__entities:
name: HERD__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
is_a: Data
attributes:
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
multivalued: true
range: AnyType
required: true
HERD__objects:
name: HERD__objects
description: A table for identifying which objects in a file contain references
to external resources.
is_a: Data
attributes:
objects:
name: objects
description: A table for identifying which objects in a file contain references
to external resources.
multivalued: true
range: AnyType
required: true
HERD__object_keys:
name: HERD__object_keys
description: A table for identifying which objects use which keys.
is_a: Data
attributes:
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
multivalued: true
range: AnyType
required: true
HERD__entity_keys:
name: HERD__entity_keys
description: A table for identifying which keys use which entity.
is_a: Data
attributes:
entity_keys:
name: entity_keys
description: A table for identifying which keys use which entity.
multivalued: true
range: AnyType
required: true

View file

@ -3,6 +3,8 @@ id: hdmf-experimental.resources
imports:
- hdmf-common.base
- nwb.language
- hdmf-experimental.resources.include
- hdmf-experimental.resources
default_prefix: hdmf-experimental.resources/
classes:
HERD:
@ -16,134 +18,36 @@ classes:
description: A table for storing user terms that are used to refer to external
resources.
multivalued: false
range: HERD_keys
range: HERD__keys
required: true
files:
name: files
description: A table for storing object ids of files used in external resources.
multivalued: false
range: HERD_files
range: HERD__files
required: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
multivalued: false
range: HERD_entities
range: HERD__entities
required: true
objects:
name: objects
description: A table for identifying which objects in a file contain references
to external resources.
multivalued: false
range: HERD_objects
range: HERD__objects
required: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
multivalued: false
range: HERD_object_keys
range: HERD__object_keys
required: true
entity_keys:
name: entity_keys
description: A table for identifying which keys use which entity.
multivalued: false
range: HERD_entity_keys
required: true
HERD_keys:
name: HERD_keys
description: A table for storing user terms that are used to refer to external
resources.
is_a: Data
attributes:
array:
name: array
range: HERD_keys_Array
HERD_keys_Array:
name: HERD_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_files:
name: HERD_files
description: A table for storing object ids of files used in external resources.
is_a: Data
attributes:
array:
name: array
range: HERD_files_Array
HERD_files_Array:
name: HERD_files_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_entities:
name: HERD_entities
description: A table for mapping user terms (i.e., keys) to resource entities.
is_a: Data
attributes:
array:
name: array
range: HERD_entities_Array
HERD_entities_Array:
name: HERD_entities_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_objects:
name: HERD_objects
description: A table for identifying which objects in a file contain references
to external resources.
is_a: Data
attributes:
array:
name: array
range: HERD_objects_Array
HERD_objects_Array:
name: HERD_objects_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_object_keys:
name: HERD_object_keys
description: A table for identifying which objects use which keys.
is_a: Data
attributes:
array:
name: array
range: HERD_object_keys_Array
HERD_object_keys_Array:
name: HERD_object_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
required: true
HERD_entity_keys:
name: HERD_entity_keys
description: A table for identifying which keys use which entity.
is_a: Data
attributes:
array:
name: array
range: HERD_entity_keys_Array
HERD_entity_keys_Array:
name: HERD_entity_keys_Array
is_a: Arraylike
attributes:
num_rows:
name: num_rows
range: AnyType
range: HERD__entity_keys
required: true

66
poetry.lock generated
View file

@ -917,24 +917,59 @@ extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.1
test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
[[package]]
name = "nwb-schema-language"
version = "0.1.0"
description = "Translation of the nwb-schema-language to LinkML"
name = "nptyping"
version = "2.5.0"
description = "Type hints for NumPy."
optional = false
python-versions = "^3.9"
files = []
develop = true
python-versions = ">=3.7"
files = [
{file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"},
{file = "nptyping-2.5.0.tar.gz", hash = "sha256:e3d35b53af967e6fb407c3016ff9abae954d3a0568f7cc13a461084224e8e20a"},
]
[package.dependencies]
linkml-runtime = "^1.1.24"
pydantic = "<2"
numpy = {version = ">=1.20.0,<2.0.0", markers = "python_version >= \"3.8\""}
[package.extras]
docs = []
build = ["invoke (>=1.6.0)", "pip-tools (>=6.5.0)"]
complete = ["pandas", "pandas-stubs-fork"]
dev = ["autoflake", "beartype (<0.10.0)", "beartype (>=0.10.0)", "black", "codecov (>=2.1.0)", "coverage", "feedparser", "invoke (>=1.6.0)", "isort", "mypy", "pandas", "pandas-stubs-fork", "pip-tools (>=6.5.0)", "pylint", "pyright", "setuptools", "typeguard", "wheel"]
pandas = ["pandas", "pandas-stubs-fork"]
qa = ["autoflake", "beartype (<0.10.0)", "beartype (>=0.10.0)", "black", "codecov (>=2.1.0)", "coverage", "feedparser", "isort", "mypy", "pylint", "pyright", "setuptools", "typeguard", "wheel"]
[package.source]
type = "directory"
url = "nwb_schema_language"
[[package]]
name = "numpy"
version = "1.25.2"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"},
{file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"},
{file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"},
{file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"},
{file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"},
{file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"},
{file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"},
{file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"},
{file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"},
{file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"},
{file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"},
{file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"},
{file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"},
{file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"},
{file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"},
{file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"},
{file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"},
{file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"},
{file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"},
{file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"},
{file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"},
{file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"},
{file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"},
{file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"},
{file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"},
]
[[package]]
name = "openpyxl"
@ -2019,7 +2054,12 @@ files = [
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[extras]
dev = []
plot = ["dash", "dash-cytoscape"]
tests = ["pytest", "pytest-depends"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "8b70e71931e2f519212b40f0dfcb9cc597e9f4894dbf392a0232df9060a35ee1"
content-hash = "d76271be11498c9055d0692800a4e1ddcb81d9807e6ff993472ea2ab7eb076b0"

View file

@ -22,6 +22,7 @@ pytest = { version="^7.4.0", optional=true}
pytest-depends = {version="^1.0.1", optional=true}
dash = {version="^2.12.1", optional=true}
dash-cytoscape = {version="^0.3.0", optional=true}
nptyping = "^2.5.0"
[tool.poetry.extras]
dev = ["nwb_schema_language"]

View file

@ -1,7 +1,7 @@
from argparse import ArgumentParser
from pathlib import Path
from linkml_runtime.dumpers import yaml_dumper
from linkml.generators import PydanticGenerator
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml import io
@ -14,9 +14,10 @@ def generate_core_yaml(output_path:Path):
def generate_core_pydantic(yaml_path:Path, output_path:Path):
for schema in yaml_path.glob('*.yaml'):
pydantic_file = (output_path / schema.name).with_suffix('.py')
python_name = schema.stem.replace('.', '_').replace('-', '_')
pydantic_file = (output_path / python_name).with_suffix('.py')
generator = PydanticGenerator(
generator = NWBPydanticGenerator(
str(schema),
pydantic_version='1',
emit_metadata=True,

View file

@ -6,6 +6,7 @@ import warnings
from .fixtures import nwb_core_fixture, tmp_output_dir
from linkml_runtime.dumpers import yaml_dumper
from linkml.generators import PydanticGenerator
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.lang_elements import NwbLangSchema
@ -15,8 +16,11 @@ def test_generate_nwblang(tmp_output_dir):
def test_generate_core(nwb_core_fixture, tmp_output_dir):
schemas = nwb_core_fixture.build().schemas
(tmp_output_dir / 'schema').mkdir(exist_ok=True)
for schema in schemas:
output_file = tmp_output_dir / (schema.name + '.yaml')
output_file = tmp_output_dir / 'schema' / (schema.name + '.yaml')
yaml_dumper.dump(schema, output_file)
@pytest.mark.depends(on=['test_generate_core'])
@ -26,10 +30,15 @@ def test_generate_pydantic(tmp_output_dir):
# core_file = tmp_output_dir / 'core.yaml'
# pydantic_file = tmp_output_dir / 'core.py'
for schema in tmp_output_dir.glob('*.yaml'):
pydantic_file = (schema.parent / schema.name).with_suffix('.py')
(tmp_output_dir / 'models').mkdir(exist_ok=True)
generator = PydanticGenerator(
for schema in (tmp_output_dir / 'schema').glob('*.yaml'):
# python friendly name
python_name = schema.stem.replace('.', '_').replace('-','_')
pydantic_file = (schema.parent.parent / 'models' / python_name).with_suffix('.py')
generator = NWBPydanticGenerator(
str(schema),
pydantic_version='1',
emit_metadata=True,
@ -37,5 +46,7 @@ def test_generate_pydantic(tmp_output_dir):
gen_slots=True
)
gen_pydantic = generator.serialize()
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)