diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index b1283e1..e89654a 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -46,6 +46,10 @@ jobs:
run: pytest
working-directory: nwb_linkml
+ - name: Run nwb_schema_language Tests
+ run: pytest
+ working-directory: nwb_schema_language
+
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2.3.0
if: runner.os != 'macOS'
diff --git a/docs/meta/todo.md b/docs/meta/todo.md
index dd9f750..9199d22 100644
--- a/docs/meta/todo.md
+++ b/docs/meta/todo.md
@@ -53,6 +53,9 @@ Loading
- [ ] Top-level containers are still a little janky, eg. how `ProcessingModule` just accepts
extra args rather than properly abstracting `value` as a `__getitem__(self, key) -> T:`
+Changes to linkml
+- [ ] Allow parameterizing "extra" fields, so we don't have to stuff things into `value` dicts
+
## Docs TODOs
```{todolist}
diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock
index f6f2c7c..36e3896 100644
--- a/nwb_linkml/pdm.lock
+++ b/nwb_linkml/pdm.lock
@@ -5,7 +5,7 @@
groups = ["default", "dev", "plot", "tests"]
strategy = ["inherit_metadata"]
lock_version = "4.5.0"
-content_hash = "sha256:1c297e11f6dc9e4f6b8d29df872177d2ce65bbd334c0b65aa5175dfb125c4d9f"
+content_hash = "sha256:14dd3d0b396dc25e554b924825664346d2644f265e48346180f1cfdf833a8c92"
[[metadata.targets]]
requires_python = ">=3.10,<3.13"
@@ -1038,9 +1038,9 @@ files = [
[[package]]
name = "numpydantic"
-version = "1.3.3"
+version = "1.6.0"
requires_python = "<4.0,>=3.9"
-summary = "Type and shape validation and serialization for numpy arrays in pydantic models"
+summary = "Type and shape validation and serialization for arbitrary array types in pydantic models"
groups = ["default"]
dependencies = [
"numpy>=1.24.0",
@@ -1048,13 +1048,13 @@ dependencies = [
"typing-extensions>=4.11.0; python_version < \"3.11\"",
]
files = [
- {file = "numpydantic-1.3.3-py3-none-any.whl", hash = "sha256:e002767252b1b77abb7715834ab7cbf58964baddae44863710f09e71b23287e4"},
- {file = "numpydantic-1.3.3.tar.gz", hash = "sha256:1cc2744f7b5fbcecd51a64fafaf8c9a564bb296336a566a16be97ba7b1c28698"},
+ {file = "numpydantic-1.6.0-py3-none-any.whl", hash = "sha256:72f3ef0bc8a5801bac6fb79920467d763d51cddec8476875efeb5064c11c04cf"},
+ {file = "numpydantic-1.6.0.tar.gz", hash = "sha256:9785ba7eb5489b9e5438109e9b2dcd1cc0aa87d1b6b5df71fb906dc0708df83c"},
]
[[package]]
name = "nwb-models"
-version = "0.1.0"
+version = "0.2.0"
requires_python = ">=3.10"
summary = "Pydantic/LinkML models for Neurodata Without Borders"
groups = ["default"]
@@ -1064,23 +1064,23 @@ dependencies = [
"pydantic>=2.3.0",
]
files = [
- {file = "nwb_models-0.1.0-py3-none-any.whl", hash = "sha256:d485422865f6762586e8f8389d67bce17a3e66d07f6273385a751145afbbbfea"},
- {file = "nwb_models-0.1.0.tar.gz", hash = "sha256:3c3ccfc6c2ac03dffe26ba7f180aecc650d6593c05d4f306f84b90fabc3ff2b8"},
+ {file = "nwb_models-0.2.0-py3-none-any.whl", hash = "sha256:72bb8a8879261488071d4e8eff35f2cbb20c44ac4bb7f67806c6329b4f8b2068"},
+ {file = "nwb_models-0.2.0.tar.gz", hash = "sha256:7e7f280378c668e1695dd9d53b32073d85615e90fee0ec417888dd83bdb9cbb3"},
]
[[package]]
name = "nwb-schema-language"
-version = "0.1.3"
-requires_python = ">=3.9,<4.0"
+version = "0.2.0"
+requires_python = "<3.13,>=3.10"
summary = "Translation of the nwb-schema-language to LinkML"
groups = ["default"]
dependencies = [
- "linkml-runtime<2.0.0,>=1.1.24",
- "pydantic<3.0.0,>=2.3.0",
+ "linkml-runtime>=1.7.7",
+ "pydantic>=2.3.0",
]
files = [
- {file = "nwb_schema_language-0.1.3-py3-none-any.whl", hash = "sha256:2eb86aac6614d490f7ec3fa68634bb9dceb3834d9820f5afc5645a9f3b0c3401"},
- {file = "nwb_schema_language-0.1.3.tar.gz", hash = "sha256:ad290e2896a9cde7e2f353bc3b8ddf42be865238d991167d397ff2e0d03c88ba"},
+ {file = "nwb_schema_language-0.2.0-py3-none-any.whl", hash = "sha256:354afb0abfbc61a6d6b227695b9a4312df5030f2746b517fc5849ac085c8e5f2"},
+ {file = "nwb_schema_language-0.2.0.tar.gz", hash = "sha256:59beda56ea52a55f4514d7e4b73e30ceaee1c60b7ddf4fc80afd48777acf9e50"},
]
[[package]]
diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml
index c8ccd36..2670310 100644
--- a/nwb_linkml/pyproject.toml
+++ b/nwb_linkml/pyproject.toml
@@ -12,7 +12,7 @@ dependencies = [
"nwb-models>=0.2.0",
"pyyaml>=6.0",
"linkml-runtime>=1.7.7",
- "nwb-schema-language>=0.1.3",
+ "nwb-schema-language>=0.2.0",
"rich>=13.5.2",
#"linkml>=1.7.10",
"linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml",
@@ -22,7 +22,7 @@ dependencies = [
"pydantic-settings>=2.0.3",
"tqdm>=4.66.1",
'typing-extensions>=4.12.2;python_version<"3.11"',
- "numpydantic>=1.5.0",
+ "numpydantic>=1.6.0",
"black>=24.4.2",
"pandas>=2.2.2",
"networkx>=3.3",
diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
index acbc896..07c5231 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
@@ -17,9 +17,10 @@ from linkml_runtime.linkml_model import (
SlotDefinition,
TypeDefinition,
)
-from pydantic import BaseModel
+from pydantic import BaseModel, PrivateAttr
from nwb_linkml.logging import init_logger
+from nwb_linkml.maps.dtype import float_types, integer_types, string_types
from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema
if sys.version_info.minor >= 11:
@@ -103,6 +104,7 @@ class Adapter(BaseModel):
_logger: Optional[Logger] = None
_debug: Optional[bool] = None
+ _nwb_classes: dict[str, Dataset | Group] = PrivateAttr(default_factory=dict)
@property
def debug(self) -> bool:
@@ -135,7 +137,10 @@ class Adapter(BaseModel):
Convenience wrapper around :meth:`.walk_field_values`
"""
- return next(self.walk_field_values(self, "neurodata_type_def", name))
+ if name not in self._nwb_classes:
+ cls = next(self.walk_field_values(self, "neurodata_type_def", name))
+ self._nwb_classes[name] = cls
+ return self._nwb_classes[name]
def get_model_with_field(self, field: str) -> Generator[Union[Group, Dataset], None, None]:
"""
@@ -170,6 +175,10 @@ class Adapter(BaseModel):
# so skip to avoid combinatoric walking
if key == "imports" and type(input).__name__ == "SchemaAdapter":
continue
+ # nwb_schema_language objects have a reference to their parent,
+ # which causes cycles
+ if key == "parent":
+ continue
val = getattr(input, key)
yield (key, val)
if isinstance(val, (BaseModel, dict, list)):
@@ -300,5 +309,85 @@ def has_attrs(cls: Dataset) -> bool:
return (
cls.attributes is not None
and len(cls.attributes) > 0
- and all([not a.value for a in cls.attributes])
+ and any([not a.value for a in cls.attributes])
+ )
+
+
+def defaults(cls: Dataset | Attribute) -> dict:
+ """
+ Handle default values -
+
+ * If ``value`` is present, yield an ``equals_string`` or ``equals_number`` constraint
+ (depending on dtype) **as well as** an ``ifabsent`` value - we both constrain the
+ possible values to a single one and also supply that value as the default
+ * else, if ``default_value`` is present, yield an appropriate ``ifabsent`` value
+ * if neither is present, yield an empty dict
+
+ Unlike nwb_schema_language, when ``value`` is set, we yield both an ``equals_*``
+ constraint and an ``ifabsent`` default, because an ``equals_*`` constraint can be
+ declared without a default in order to validate that a value is explicitly set to
+ the constrained value, failing if no value is provided.
+ """
+ ret = {}
+ if cls.value:
+ if cls.dtype in integer_types:
+ ret["equals_number"] = cls.value
+ ret["ifabsent"] = f"integer({cls.value})"
+ elif cls.dtype in float_types:
+ ret["equals_number"] = cls.value
+ ret["ifabsent"] = f"float({cls.value})"
+ elif cls.dtype in string_types:
+ ret["equals_string"] = cls.value
+ ret["ifabsent"] = f"string({cls.value})"
+ else:
+ ret["equals_string"] = cls.value
+ ret["ifabsent"] = cls.value
+
+ elif cls.default_value:
+ if cls.dtype in string_types:
+ ret["ifabsent"] = f"string({cls.default_value})"
+ elif cls.dtype in integer_types:
+ ret["ifabsent"] = f"int({cls.default_value})"
+ elif cls.dtype in float_types:
+ ret["ifabsent"] = f"float({cls.default_value})"
+ else:
+ ret["ifabsent"] = cls.default_value
+
+ return ret
+
+
+def is_container(group: Group) -> bool:
+ """
+ Check if a group is a container group.
+
+ i.e. a group that...
+ * has no name
+ * has a ``*`` (multivalued) quantity
+ * has a ``neurodata_type_inc``
+ * has no ``neurodata_type_def``
+ * has no sub-groups
+ * has no datasets
+ * has no attributes
+
+ Examples:
+
+ .. code-block:: yaml
+
+ - name: templates
+ groups:
+ - neurodata_type_inc: TimeSeries
+ doc: TimeSeries objects containing template data of presented stimuli.
+ quantity: '*'
+ - neurodata_type_inc: Images
+ doc: Images objects containing images of presented stimuli.
+ quantity: '*'
+ """
+ return (
+ not group.name
+ and group.quantity == "*"
+ and group.neurodata_type_inc
+ and not group.neurodata_type_def
+ and not group.datasets
+ and not group.groups
+ and not group.attributes
)
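
To make the new helpers concrete, here is a minimal sketch of what `defaults` yields. The example attributes are hypothetical (not drawn from any NWB schema); only the imports follow this diff.

```python
# Sketch: how `defaults` maps nwb-schema-language values onto linkml metaslots.
from nwb_linkml.adapters.adapter import defaults
from nwb_schema_language import Attribute

# A fixed value is both constrained (equals_string) and defaulted (ifabsent).
fixed = Attribute(name="unit", dtype="text", doc="unit of measure", value="volts")
assert defaults(fixed) == {"equals_string": "volts", "ifabsent": "string(volts)"}

# A default_value alone yields only an ifabsent, with no equals_* constraint.
loose = Attribute(name="gain", dtype="float32", doc="gain", default_value=1.0)
assert defaults(loose) == {"ifabsent": "float(1.0)"}
```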
diff --git a/nwb_linkml/src/nwb_linkml/adapters/attribute.py b/nwb_linkml/src/nwb_linkml/adapters/attribute.py
index 7ae2ea1..8326a51 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/attribute.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/attribute.py
@@ -7,26 +7,13 @@ from typing import ClassVar, Optional, Type, TypedDict
from linkml_runtime.linkml_model.meta import SlotDefinition
-from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d
+from nwb_linkml.adapters.adapter import Adapter, BuildResult, defaults, is_1d
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.maps import Map
from nwb_linkml.maps.dtype import handle_dtype, inlined
from nwb_schema_language import Attribute
-def _make_ifabsent(val: str | int | float | None) -> str | None:
- if val is None:
- return None
- elif isinstance(val, str):
- return f"string({val})"
- elif isinstance(val, int):
- return f"integer({val})"
- elif isinstance(val, float):
- return f"float({val})"
- else:
- return str(val)
-
-
class AttrDefaults(TypedDict):
"""Default fields for an attribute"""
@@ -38,31 +25,6 @@ class AttrDefaults(TypedDict):
class AttributeMap(Map):
"""Base class for attribute mapping transformations :)"""
- @classmethod
- def handle_defaults(cls, attr: Attribute) -> AttrDefaults:
- """
- Construct arguments for linkml slot default metaslots from nwb schema lang attribute props
- """
- equals_string = None
- equals_number = None
- default_value = None
- if attr.value:
- if isinstance(attr.value, (int, float)):
- equals_number = attr.value
- elif attr.value:
- equals_string = str(attr.value)
-
- if equals_number:
- default_value = _make_ifabsent(equals_number)
- elif equals_string:
- default_value = _make_ifabsent(equals_string)
- elif attr.default_value:
- default_value = _make_ifabsent(attr.default_value)
-
- return AttrDefaults(
- equals_string=equals_string, equals_number=equals_number, ifabsent=default_value
- )
-
@classmethod
@abstractmethod
def check(cls, attr: Attribute) -> bool:
@@ -105,7 +67,7 @@ class MapScalar(AttributeMap):
description=attr.doc,
required=attr.required,
inlined=inlined(attr.dtype),
- **cls.handle_defaults(attr),
+ **defaults(attr),
)
return BuildResult(slots=[slot])
@@ -154,7 +116,7 @@ class MapArray(AttributeMap):
required=attr.required,
inlined=inlined(attr.dtype),
**expressions,
- **cls.handle_defaults(attr),
+ **defaults(attr),
)
return BuildResult(slots=[slot])
diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py
index f0b0053..0558862 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py
@@ -7,7 +7,7 @@ from typing import ClassVar, Optional, Type
from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition
-from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound
+from nwb_linkml.adapters.adapter import BuildResult, defaults, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
@@ -59,9 +59,7 @@ class MapScalar(DatasetMap):
slots:
- name: MyScalar
description: A scalar
- multivalued: false
range: int32
- required: false
"""
@@ -108,6 +106,7 @@ class MapScalar(DatasetMap):
description=cls.doc,
range=handle_dtype(cls.dtype),
**QUANTITY_MAP[cls.quantity],
+ **defaults(cls),
)
res = BuildResult(slots=[this_slot])
return res
@@ -208,7 +207,19 @@ class MapScalarAttributes(DatasetMap):
"""
Map to a scalar attribute with an adjoining "value" slot
"""
- value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True)
+ # the *value slot* within the generated class is always required,
+ # but the slot in the parent class referring to this one will indicate whether the whole
+ # thing is optional or not. You can't provide the attributes of the optional dataset
+ # without providing its value
+ quantity = QUANTITY_MAP[cls.quantity].copy()
+ quantity["required"] = True
+
+ value_slot = SlotDefinition(
+ name="value",
+ range=handle_dtype(cls.dtype),
+ **quantity,
+ **defaults(cls),
+ )
res.classes[0].attributes["value"] = value_slot
return res
@@ -616,7 +627,8 @@ class MapNVectors(DatasetMap):
DynamicTable (and the slot VectorData where this is called for)
is handled specially and just dropped, because we handle the possibility for
- arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes.
+ arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes
+ (see :class:`.MapNVectorData` ).
So really this is just a handler for the `Images` case
"""
@@ -652,6 +664,40 @@ class MapNVectors(DatasetMap):
return res
+class MapNVectorData(DatasetMap):
+ """
+ An extremely special case just for DynamicTable:
+ DynamicTable declares that all of its extra columns are ``VectorData``, using an
+ unnamed dataset with ``*`` quantity, similar to the case of :class:`.MapNVectors` .
+
+ We handle this with the :mod:`.includes.hdmf` module mixin classes instead,
+ and so to avoid generating a pointless slot and class,
+ we just catch that case and return nothing.
+ """
+
+ @classmethod
+ def check(c, cls: Dataset) -> bool:
+ """
+ Check for being an unnamed multivalued vector class that IS VectorData
+ """
+ return (
+ cls.name is None
+ and cls.neurodata_type_def is None
+ and cls.neurodata_type_inc
+ and cls.neurodata_type_inc == "VectorData"
+ and cls.quantity in ("*", "+")
+ )
+
+ @classmethod
+ def apply(
+ c, cls: Dataset, res: Optional[BuildResult] = None, name: Optional[str] = None
+ ) -> BuildResult:
+ """
+ Return ... nothing
+ """
+ return BuildResult()
+
+
class MapCompoundDtype(DatasetMap):
"""
A ``dtype`` declared as an array of types that function effectively as a row in a table.
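
For reference, `MapNVectorData` catches declarations like the following sketch, a hypothetical construction mirroring DynamicTable's extra-columns spec:

```python
# Sketch: the unnamed VectorData declaration that MapNVectorData drops.
from nwb_linkml.adapters.dataset import MapNVectorData
from nwb_schema_language import Dataset

extra_columns = Dataset(
    neurodata_type_inc="VectorData",
    doc="Vector columns of this dynamic table.",
    quantity="*",
)
assert MapNVectorData.check(extra_columns)
# apply() deliberately contributes no slots or classes to the build.
assert not MapNVectorData.apply(extra_columns).slots
```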
diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py
index 0703aa0..f9ef07d 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/group.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/group.py
@@ -2,11 +2,11 @@
Adapter for NWB groups to linkml Classes
"""
-from typing import List, Type
+from typing import Type
from linkml_runtime.linkml_model import SlotDefinition
-from nwb_linkml.adapters.adapter import BuildResult
+from nwb_linkml.adapters.adapter import BuildResult, is_container
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.maps import QUANTITY_MAP
@@ -29,7 +29,7 @@ class GroupAdapter(ClassAdapter):
"""
# Handle container groups with only * quantity unnamed groups
if (
- len(self.cls.groups) > 0
+ self.cls.groups
and not self.cls.links
and all([self._check_if_container(g) for g in self.cls.groups])
): # and \
@@ -38,26 +38,28 @@ class GroupAdapter(ClassAdapter):
# handle if we are a terminal container group without making a new class
if (
- len(self.cls.groups) == 0
- and len(self.cls.datasets) == 0
+ not self.cls.groups
+ and not self.cls.datasets
and self.cls.neurodata_type_inc is not None
and self.parent is not None
):
return self.handle_container_slot(self.cls)
- nested_res = self.build_subclasses()
- # add links
- links = self.build_links()
+ nested_res = self.build_datasets()
+ nested_res += self.build_groups()
+ nested_res += self.build_links()
+ nested_res += self.build_containers()
+ nested_res += self.build_special_cases()
# we don't propagate slots up to the next level since they are meant for this
# level (ie. a way to refer to our children)
- res = self.build_base(extra_attrs=nested_res.slots + links)
+ res = self.build_base(extra_attrs=nested_res.slots)
# we do propagate classes tho
res.classes.extend(nested_res.classes)
return res
- def build_links(self) -> List[SlotDefinition]:
+ def build_links(self) -> BuildResult:
"""
Build links specified in the ``links`` field as slots that refer to other
classes, with an additional annotation specifying that they are in fact links.
@@ -66,7 +68,7 @@ class GroupAdapter(ClassAdapter):
file hierarchy as a string.
"""
if not self.cls.links:
- return []
+ return BuildResult()
annotations = [{"tag": "source_type", "value": "link"}]
@@ -83,7 +85,7 @@ class GroupAdapter(ClassAdapter):
)
for link in self.cls.links
]
- return slots
+ return BuildResult(slots=slots)
def handle_container_group(self, cls: Group) -> BuildResult:
"""
@@ -129,7 +131,7 @@ class GroupAdapter(ClassAdapter):
# We are a top-level container class like ProcessingModule
base = self.build_base()
# remove all the attributes and replace with child slot
- base.classes[0].attributes = [slot]
+ base.classes[0].attributes.update({slot.name: slot})
return base
def handle_container_slot(self, cls: Group) -> BuildResult:
@@ -167,28 +169,88 @@ class GroupAdapter(ClassAdapter):
return BuildResult(slots=[slot])
- def build_subclasses(self) -> BuildResult:
+ def build_datasets(self) -> BuildResult:
"""
Build nested groups and datasets
Create ClassDefinitions for each, but then also create SlotDefinitions that
will be used as attributes linking the main class to the subclasses
+
+ Datasets are simple: they are terminal classes, and all logic
+ for creating slots vs. classes is handled by the adapter class
"""
- # Datasets are simple, they are terminal classes, and all logic
- # for creating slots vs. classes is handled by the adapter class
dataset_res = BuildResult()
- for dset in self.cls.datasets:
- dset_adapter = DatasetAdapter(cls=dset, parent=self)
- dataset_res += dset_adapter.build()
+ if self.cls.datasets:
+ for dset in self.cls.datasets:
+ dset_adapter = DatasetAdapter(cls=dset, parent=self)
+ dataset_res += dset_adapter.build()
+ return dataset_res
+
+ def build_groups(self) -> BuildResult:
+ """
+ Build subgroups, excluding pure container subgroups
+ """
group_res = BuildResult()
- for group in self.cls.groups:
- group_adapter = GroupAdapter(cls=group, parent=self)
- group_res += group_adapter.build()
+ if self.cls.groups:
+ for group in self.cls.groups:
+ if is_container(group):
+ continue
+ group_adapter = GroupAdapter(cls=group, parent=self)
+ group_res += group_adapter.build()
- res = dataset_res + group_res
+ return group_res
+ def build_containers(self) -> BuildResult:
+ """
+ Build all container types into a single ``value`` slot
+ """
+ res = BuildResult()
+ if not self.cls.groups:
+ return res
+ containers = [grp for grp in self.cls.groups if is_container(grp)]
+ if not containers:
+ return res
+
+ if len(containers) == 1:
+ range = {"range": containers[0].neurodata_type_inc}
+ description = containers[0].doc
+ else:
+ range = {"any_of": [{"range": subcls.neurodata_type_inc} for subcls in containers]}
+ description = "\n\n".join([grp.doc for grp in containers])
+
+ slot = SlotDefinition(
+ name="value",
+ multivalued=True,
+ inlined=True,
+ inlined_as_list=False,
+ description=description,
+ **range,
+ )
+
+ if self.debug: # pragma: no cover - only used in development
+ slot.annotations["group_adapter"] = {
+ "tag": "slot_adapter",
+ "value": "container_value_slot",
+ }
+ res.slots = [slot]
+ return res
+
+ def build_special_cases(self) -> BuildResult:
+ """
+ Special cases, at this point just for NWBFile, which has
+ extra ``.specloc`` and ``specifications`` attrs
+ """
+ res = BuildResult()
+ if self.cls.neurodata_type_def == "NWBFile":
+ res.slots = [
+ SlotDefinition(
+ name="specifications",
+ range="dict",
+ description="Nested dictionary of schema specifications",
+ ),
+ ]
return res
def build_self_slot(self) -> SlotDefinition:
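
For the "templates" example shown in `is_container` above, `build_containers` collapses the two unnamed groups into roughly the following slot (a sketch of the expected result, not generator output):

```python
# Sketch: the single `value` slot built from the "templates" container groups.
from linkml_runtime.linkml_model import SlotDefinition

value_slot = SlotDefinition(
    name="value",
    multivalued=True,
    inlined=True,
    inlined_as_list=False,
    description=(
        "TimeSeries objects containing template data of presented stimuli."
        "\n\n"
        "Images objects containing images of presented stimuli."
    ),
    any_of=[{"range": "TimeSeries"}, {"range": "Images"}],
)
```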
diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
index c6abd70..76d1835 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
@@ -8,8 +8,7 @@ for extracting information and generating translated schema
import contextlib
from copy import copy
from pathlib import Path
-from pprint import pformat
-from typing import Dict, List, Optional
+from typing import Dict, Generator, List, Optional
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import Annotation, SchemaDefinition
@@ -19,7 +18,7 @@ from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.lang_elements import NwbLangSchema
from nwb_linkml.ui import AdapterProgress
-from nwb_schema_language import Namespaces
+from nwb_schema_language import Dataset, Group, Namespaces
class NamespacesAdapter(Adapter):
@@ -31,6 +30,9 @@ class NamespacesAdapter(Adapter):
schemas: List[SchemaAdapter]
imported: List["NamespacesAdapter"] = Field(default_factory=list)
+ _completed: bool = False
+ """whether we have run the :meth:`.complete_namespace` method"""
+
@classmethod
def from_yaml(cls, path: Path) -> "NamespacesAdapter":
"""
@@ -65,7 +67,7 @@ class NamespacesAdapter(Adapter):
needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns)
ns_adapter.imported.append(needed_adapter)
- ns_adapter.populate_imports()
+ ns_adapter.complete_namespaces()
return ns_adapter
@@ -76,6 +78,9 @@ class NamespacesAdapter(Adapter):
Build the NWB namespace to the LinkML Schema
"""
+ if not self._completed:
+ self.complete_namespaces()
+
sch_result = BuildResult()
for sch in self.schemas:
if progress is not None:
@@ -149,45 +154,143 @@ class NamespacesAdapter(Adapter):
break
return self
- def find_type_source(self, name: str) -> SchemaAdapter:
+ def complete_namespaces(self) -> None:
"""
- Given some neurodata_type_inc, find the schema that it's defined in.
+ After loading the namespace, and after any additional imports have been added,
+ this must be called to complete the definitions of the contained schema objects.
- Rather than returning as soon as a match is found, check all
+ This is not automatic because NWB doesn't have a formal dependency resolution system,
+ so it is often impossible to know which imports are needed until after the namespace
+ adapter has been instantiated.
+
+ It **is** called automatically by the :meth:`.build` method if it hasn't been called already.
"""
- # First check within the main schema
- internal_matches = []
- for schema in self.schemas:
- class_names = [cls.neurodata_type_def for cls in schema.created_classes]
- if name in class_names:
- internal_matches.append(schema)
+ self._populate_imports()
+ self._roll_down_inheritance()
- if len(internal_matches) > 1:
- raise KeyError(
- f"Found multiple schemas in namespace that define {name}:\ninternal:"
- f" {pformat(internal_matches)}\nimported:{pformat(internal_matches)}"
- )
- elif len(internal_matches) == 1:
- return internal_matches[0]
+ for i in self.imported:
+ i.complete_namespaces()
- import_matches = []
- for imported_ns in self.imported:
- for schema in imported_ns.schemas:
- class_names = [cls.neurodata_type_def for cls in schema.created_classes]
- if name in class_names:
- import_matches.append(schema)
+ self._completed = True
- if len(import_matches) > 1:
- raise KeyError(
- f"Found multiple schemas in namespace that define {name}:\ninternal:"
- f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
- )
- elif len(import_matches) == 1:
- return import_matches[0]
+ def _roll_down_inheritance(self) -> None:
+ """
+ nwb-schema-language inheritance doesn't work like normal python inheritance -
+ rather than inheriting only the 'top level' of a class, a child class also
+ recursively merges all properties from its parent objects.
+
+ While this operation does not take care to modify classes in an order that respects
+ their inheritance (i.e. rolling down ancestor classes first, in order, before the leaf
+ classes), that doesn't matter - this method should be both idempotent and order
+ insensitive for a given source schema.
+
+ References:
+ https://github.com/NeurodataWithoutBorders/pynwb/issues/1954
+ """
+ for cls in self.walk_types(self, (Group, Dataset)):
+ if not cls.neurodata_type_inc:
+ continue
+
+ parents = self._get_class_ancestors(cls, include_child=True)
+
+ # merge and cast
+ new_cls: dict = {}
+ for i, parent in enumerate(parents):
+ # we want a full roll-down of all the ancestor classes,
+ # but we make an abbreviated leaf class
+ complete = i != len(parents) - 1
+ new_cls = roll_down_nwb_class(new_cls, parent, complete=complete)
+ new_cls: Group | Dataset = type(cls)(**new_cls)
+ new_cls.parent = cls.parent
+
+ # reinsert
+ self._overwrite_class(new_cls, cls)
+
+ def _get_class_ancestors(
+ self, cls: Dataset | Group, include_child: bool = True
+ ) -> list[Dataset | Group]:
+ """
+ Get the chain of ancestor classes inherited via ``neurodata_type_inc``
+
+ Args:
+ cls (:class:`.Dataset` | :class:`.Group`): The class to get ancestors of
+ include_child (bool): If ``True`` (default), include ``cls`` in the output list
+ """
+ parent = self.get(cls.neurodata_type_inc)
+ parents = [parent]
+ while parent.neurodata_type_inc:
+ parent = self.get(parent.neurodata_type_inc)
+ parents.insert(0, parent)
+
+ if include_child:
+ parents.append(cls)
+
+ return parents
+
+ def _overwrite_class(self, new_cls: Dataset | Group, old_cls: Dataset | Group) -> None:
+ """
+ Overwrite the version of a dataset or group that is stored in our schemas
+ """
+ if old_cls.parent:
+ if isinstance(old_cls, Dataset):
+ new_cls.parent.datasets[new_cls.parent.datasets.index(old_cls)] = new_cls
+ else:
+ new_cls.parent.groups[new_cls.parent.groups.index(old_cls)] = new_cls
else:
- raise KeyError(f"No schema found that define {name}")
+ # top level class, need to go and find it
+ schema = self.find_type_source(old_cls)
+ if isinstance(new_cls, Dataset):
+ schema.datasets[schema.datasets.index(old_cls)] = new_cls
+ else:
+ schema.groups[schema.groups.index(old_cls)] = new_cls
- def populate_imports(self) -> "NamespacesAdapter":
+ def find_type_source(self, cls: str | Dataset | Group, fast: bool = False) -> SchemaAdapter:
+ """
+ Given some type (as `neurodata_type_def`), find the schema that it's defined in.
+
+ Rather than returning as soon as a match is found, ensure that duplicates are
+ not found within the primary schema, then do the same for all imported schemas.
+
+ Args:
+ cls (str | :class:`.Dataset` | :class:`.Group`): The ``neurodata_type_def``
+ to look for the source of. If a Dataset or Group, look for the object itself
+ (cls in schema.datasets), otherwise look for a class with a matching name.
+ fast (bool): If ``True``, return as soon as a match is found.
+ If ``False``, return after checking all schemas for duplicates.
+
+ Returns:
+ :class:`.SchemaAdapter`
+
+ Raises:
+ KeyError: if multiple schemas or no schemas are found
+ """
+ matches = []
+ for schema in self.all_schemas():
+ in_schema = False
+ if (
+ isinstance(cls, str)
+ and cls in [c.neurodata_type_def for c in schema.created_classes]
+ or isinstance(cls, Dataset)
+ and cls in schema.datasets
+ or isinstance(cls, Group)
+ and cls in schema.groups
+ ):
+ in_schema = True
+
+ if in_schema:
+ if fast:
+ return schema
+ else:
+ matches.append(schema)
+
+ if len(matches) > 1:
+ raise KeyError(f"Found multiple schemas in namespace that define {cls}:\n{matches}")
+ elif len(matches) == 1:
+ return matches[0]
+ else:
+ raise KeyError(f"No schema found that define {cls}")
+
+ def _populate_imports(self) -> "NamespacesAdapter":
"""
Populate the imports that are needed for each schema file
@@ -279,3 +382,109 @@ class NamespacesAdapter(Adapter):
if name in sources:
return ns.name
return None
+
+ def all_schemas(self) -> Generator[SchemaAdapter, None, None]:
+ """
+ Iterator over all schemas including imports
+ """
+ for sch in self.schemas:
+ yield sch
+ for imported in self.imported:
+ for sch in imported.schemas:
+ yield sch
+
+
+def roll_down_nwb_class(
+ source: Group | Dataset | dict, target: Group | Dataset | dict, complete: bool = False
+) -> dict:
+ """
+ Merge an ancestor (via ``neurodata_type_inc`` ) source class with a
+ child ``target`` class.
+
+ On the first recursive pass, only those values that are set on the target are copied from the
+ source class. This isn't a true merge: what we are after is to recursively merge all the
+ values that are modified in the child class with those of the parent class below the top
+ level, since the top-level attributes will be carried through via normal inheritance.
+
+ Rather than re-instantiating the child class, we return the dictionary so that this
+ function can be applied in series to merge a whole ancestry chain within
+ :class:`.NamespacesAdapter` . The merging isn't orchestrated within this function itself
+ because ancestor class definitions can be spread out over many schemas,
+ and we need the adapter to gather them in all the cases we'd be using this.
+
+ Args:
+ source (Group | Dataset | dict): source class (or its dictionary form)
+ target (Group | Dataset | dict): target class (values merged over source)
+ complete (bool): (default ``False``) do a complete merge, merging everything
+ from source to target without trying to minimize redundancy.
+ Used to collapse ancestor classes before the terminal class.
+
+ References:
+ https://github.com/NeurodataWithoutBorders/pynwb/issues/1954
+
+ """
+ if isinstance(source, (Group, Dataset)):
+ source = source.model_dump(exclude_none=True)
+ if isinstance(target, (Group, Dataset)):
+ target = target.model_dump(exclude_none=True)
+
+ exclude = ("neurodata_type_def",)
+
+ # if we are on the first recursion, we exclude top-level items that are not set in the target
+ if complete:
+ ret = {k: v for k, v in source.items() if k not in exclude}
+ else:
+ ret = {k: v for k, v in source.items() if k not in exclude and k in target}
+
+ for key, value in target.items():
+ if key not in ret:
+ ret[key] = value
+ elif isinstance(value, dict):
+ if key in ret:
+ ret[key] = roll_down_nwb_class(ret[key], value, complete=True)
+ else:
+ ret[key] = value
+ elif isinstance(value, list) and all([isinstance(v, dict) for v in value]):
+ src_keys = {v["name"]: ret[key].index(v) for v in ret.get(key, {}) if "name" in v}
+ target_keys = {v["name"]: value.index(v) for v in value if "name" in v}
+
+ new_val = []
+ # screwy double iteration to preserve dict order
+ # all dicts not in target, if in depth > 0
+ if complete:
+ new_val.extend(
+ [
+ ret[key][src_keys[k]]
+ for k in src_keys
+ if k in set(src_keys.keys()) - set(target_keys.keys())
+ ]
+ )
+ # all dicts not in source
+ new_val.extend(
+ [
+ value[target_keys[k]]
+ for k in target_keys
+ if k in set(target_keys.keys()) - set(src_keys.keys())
+ ]
+ )
+ # merge dicts in both
+ new_val.extend(
+ [
+ roll_down_nwb_class(ret[key][src_keys[k]], value[target_keys[k]], complete=True)
+ for k in target_keys
+ if k in set(src_keys.keys()).intersection(set(target_keys.keys()))
+ ]
+ )
+ new_val = sorted(new_val, key=lambda i: i["name"])
+ # add any dicts that don't have the list_key
+ # they can't be merged since they can't be matched
+ if complete:
+ new_val.extend([v for v in ret.get(key, {}) if "name" not in v])
+ new_val.extend([v for v in value if "name" not in v])
+
+ ret[key] = new_val
+
+ else:
+ ret[key] = value
+
+ return ret
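
Since `roll_down_nwb_class` accepts plain dicts, its merge semantics can be traced without any schema machinery. A minimal sketch with hypothetical parent/child classes:

```python
# Sketch: a leaf-level merge (complete=False) keeps only overridden members,
# while filling in any unset values on the members it does keep.
from nwb_linkml.adapters.namespaces import roll_down_nwb_class

parent = {
    "neurodata_type_def": "Parent",
    "doc": "parent",
    "attributes": [
        {"name": "a", "value": "a", "dims": ["x", "y"]},
        {"name": "b", "value": "b"},
    ],
}
child = {
    "neurodata_type_def": "Child",
    "doc": "child",
    "attributes": [{"name": "a", "value": "z"}],
}

merged = roll_down_nwb_class(parent, child, complete=False)
# target wins on scalar collisions
assert merged["doc"] == "child"
# attribute "a": value overridden, unset dims filled in from the parent;
# attribute "b" is dropped at the leaf - plain linkml inheritance covers it
assert merged["attributes"] == [{"name": "a", "value": "z", "dims": ["x", "y"]}]
# neurodata_type_def always comes from the target, never the source
assert merged["neurodata_type_def"] == "Child"
```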
diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
index 1928cf5..659bd2b 100644
--- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py
+++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
@@ -9,13 +9,13 @@ import re
from dataclasses import dataclass, field
from pathlib import Path
from types import ModuleType
-from typing import Callable, ClassVar, Dict, List, Literal, Optional, Tuple
+from typing import Callable, ClassVar, Dict, List, Optional, Tuple
from linkml.generators import PydanticGenerator
from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray
from linkml.generators.pydanticgen.build import ClassResult, SlotResult
from linkml.generators.pydanticgen.pydanticgen import SplitMode
-from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule
+from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport, PydanticModule
from linkml_runtime.linkml_model.meta import (
ArrayExpression,
SchemaDefinition,
@@ -26,8 +26,10 @@ from linkml_runtime.utils.formatutils import remove_empty_items
from linkml_runtime.utils.schemaview import SchemaView
from nwb_linkml.includes.base import (
+ BASEMODEL_CAST_WITH_VALUE,
BASEMODEL_COERCE_CHILD,
BASEMODEL_COERCE_VALUE,
+ BASEMODEL_EXTRA_TO_VALUE,
BASEMODEL_GETITEM,
)
from nwb_linkml.includes.hdmf import (
@@ -55,10 +57,17 @@ class NWBPydanticGenerator(PydanticGenerator):
'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
BASEMODEL_GETITEM,
BASEMODEL_COERCE_VALUE,
+ BASEMODEL_CAST_WITH_VALUE,
BASEMODEL_COERCE_CHILD,
+ BASEMODEL_EXTRA_TO_VALUE,
)
split: bool = True
- imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")])
+ imports: list[Import] = field(
+ default_factory=lambda: [
+ Import(module="numpy", alias="np"),
+ Import(module="pydantic", objects=[ObjectImport(name="model_validator")]),
+ ]
+ )
schema_map: Optional[Dict[str, SchemaDefinition]] = None
"""See :meth:`.LinkMLProvider.build` for usage - a list of specific versions to import from"""
@@ -70,7 +79,7 @@ class NWBPydanticGenerator(PydanticGenerator):
emit_metadata: bool = True
gen_classvars: bool = True
gen_slots: bool = True
- extra_fields: Literal["allow", "forbid", "ignore"] = "allow"
+ # extra_fields: Literal["allow", "forbid", "ignore"] = "allow"
skip_meta: ClassVar[Tuple[str]] = ("domain_of", "alias")
@@ -136,7 +145,7 @@ class NWBPydanticGenerator(PydanticGenerator):
"""Customize dynamictable behavior"""
cls = AfterGenerateClass.inject_dynamictable(cls)
cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv)
- cls = AfterGenerateClass.inject_elementidentifiers(cls, sv, self._get_element_import)
+ cls = AfterGenerateClass.inject_dynamictable_imports(cls, sv, self._get_element_import)
cls = AfterGenerateClass.strip_vector_data_slots(cls, sv)
return cls
@@ -267,7 +276,7 @@ class AfterGenerateClass:
"""
if cls.cls.name == "DynamicTable":
- cls.cls.bases = ["DynamicTableMixin", "ConfiguredBaseModel"]
+ cls.cls.bases = ["DynamicTableMixin"]
if (
cls.injected_classes is None
@@ -285,18 +294,18 @@ class AfterGenerateClass:
else: # pragma: no cover - for completeness, shouldn't happen
cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy()
elif cls.cls.name == "VectorData":
- cls.cls.bases = ["VectorDataMixin", "ConfiguredBaseModel"]
+ cls.cls.bases = ["VectorDataMixin"]
# make ``value`` generic on T
if "value" in cls.cls.attributes:
cls.cls.attributes["value"].range = "Optional[T]"
elif cls.cls.name == "VectorIndex":
- cls.cls.bases = ["VectorIndexMixin", "ConfiguredBaseModel"]
+ cls.cls.bases = ["VectorIndexMixin"]
elif cls.cls.name == "DynamicTableRegion":
- cls.cls.bases = ["DynamicTableRegionMixin", "VectorData", "ConfiguredBaseModel"]
+ cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
elif cls.cls.name == "AlignedDynamicTable":
cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]
elif cls.cls.name == "ElementIdentifiers":
- cls.cls.bases = ["ElementIdentifiersMixin", "Data", "ConfiguredBaseModel"]
+ cls.cls.bases = ["ElementIdentifiersMixin", "Data"]
# make ``value`` generic on T
if "value" in cls.cls.attributes:
cls.cls.attributes["value"].range = "Optional[T]"
@@ -346,19 +355,22 @@ class AfterGenerateClass:
return cls
@staticmethod
- def inject_elementidentifiers(
+ def inject_dynamictable_imports(
cls: ClassResult, sv: SchemaView, import_method: Callable[[str], Import]
) -> ClassResult:
"""
- Inject ElementIdentifiers into module that define dynamictables -
- needed to handle ID columns
+ Ensure that schemas that contain dynamictables have all the imports needed to use them
"""
if (
cls.source.is_a == "DynamicTable"
or "DynamicTable" in sv.class_ancestors(cls.source.name)
) and sv.schema.name != "hdmf-common.table":
- imp = import_method("ElementIdentifiers")
- cls.imports += [imp]
+ imp = [
+ import_method("ElementIdentifiers"),
+ import_method("VectorData"),
+ import_method("VectorIndex"),
+ ]
+ cls.imports += imp
return cls
@staticmethod
diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py
index 3ecae8c..4747f57 100644
--- a/nwb_linkml/src/nwb_linkml/includes/base.py
+++ b/nwb_linkml/src/nwb_linkml/includes/base.py
@@ -3,7 +3,7 @@ Modifications to the ConfiguredBaseModel used by all generated classes
"""
BASEMODEL_GETITEM = """
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
\"\"\"Try and get a value from value or "data" if we have it\"\"\"
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -16,7 +16,7 @@ BASEMODEL_GETITEM = """
BASEMODEL_COERCE_VALUE = """
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
\"\"\"Try to rescue instantiation by using the value field\"\"\"
try:
return handler(v)
@@ -30,6 +30,20 @@ BASEMODEL_COERCE_VALUE = """
raise e1
"""
+BASEMODEL_CAST_WITH_VALUE = """
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ \"\"\"Try to rescue instantiation by casting into the model's value field\"\"\"
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+"""
+
BASEMODEL_COERCE_CHILD = """
@field_validator("*", mode="before")
@classmethod
@@ -41,9 +55,36 @@ BASEMODEL_COERCE_CHILD = """
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
"""
+
+BASEMODEL_EXTRA_TO_VALUE = """
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ \"\"\"
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ \"\"\"
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+"""
diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
index 7a7d294..3d456d0 100644
--- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py
+++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
@@ -39,8 +39,30 @@ if TYPE_CHECKING: # pragma: no cover
T = TypeVar("T", bound=NDArray)
T_INJECT = 'T = TypeVar("T", bound=NDArray)'
+if "pytest" in sys.modules:
+ from nwb_models.models import ConfiguredBaseModel
+else:
-class DynamicTableMixin(BaseModel):
+ class ConfiguredBaseModel(BaseModel):
+ """
+ Dummy ConfiguredBaseModel (without its methods from :mod:`.includes.base` )
+ used so that the injected mixins inherit from the `ConfiguredBaseModel`
+ and we get a linear inheritance MRO (rather than needing to inherit
+ from the mixins *and* the configured base model) so that the
+ model_config is correctly resolved (ie. to allow extra args)
+ """
+
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+
+
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -295,13 +317,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -358,24 +386,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -426,7 +457,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -518,7 +549,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -574,7 +605,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -927,12 +958,18 @@ if "pytest" in sys.modules:
class VectorData(VectorDataMixin):
"""VectorData subclass for testing"""
- pass
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of what these vectors represent."""
+ )
class VectorIndex(VectorIndexMixin):
"""VectorIndex subclass for testing"""
- pass
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of what these vectors represent."""
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""DynamicTableRegion subclass for testing"""
diff --git a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py
index bf4fbe6..23bd2fa 100644
--- a/nwb_linkml/src/nwb_linkml/io/hdf5.py
+++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py
@@ -166,8 +166,13 @@ def _load_node(
raise TypeError(f"Nodes can only be h5py Datasets and Groups, got {obj}")
if "neurodata_type" in obj.attrs:
+ # SPECIAL CASE: ignore `.specloc`
+ if ".specloc" in args:
+ del args[".specloc"]
+
model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"])
return model(**args)
+
else:
if "name" in args:
del args["name"]
diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py
index 8f960c7..065d0d3 100644
--- a/nwb_linkml/src/nwb_linkml/io/schema.py
+++ b/nwb_linkml/src/nwb_linkml/io/schema.py
@@ -131,7 +131,7 @@ def load_namespace_adapter(
else:
adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
- adapter.populate_imports()
+ adapter.complete_namespaces()
return adapter
diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py
index c199062..476e6e2 100644
--- a/nwb_linkml/src/nwb_linkml/lang_elements.py
+++ b/nwb_linkml/src/nwb_linkml/lang_elements.py
@@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import (
TypeDefinition,
)
-from nwb_linkml.maps import flat_to_linkml
+from nwb_linkml.maps import flat_to_linkml, linkml_reprs
def _make_dtypes() -> List[TypeDefinition]:
@@ -36,8 +36,13 @@ def _make_dtypes() -> List[TypeDefinition]:
name=nwbtype,
minimum_value=amin,
typeof=linkmltype, # repr=repr_string
+ repr=linkml_reprs.get(nwbtype, None),
)
DTypeTypes.append(atype)
+
+ # a dict type!
+ DTypeTypes.append(TypeDefinition(name="dict", repr="dict"))
+
return DTypeTypes
diff --git a/nwb_linkml/src/nwb_linkml/maps/__init__.py b/nwb_linkml/src/nwb_linkml/maps/__init__.py
index 8b01447..cdad7d0 100644
--- a/nwb_linkml/src/nwb_linkml/maps/__init__.py
+++ b/nwb_linkml/src/nwb_linkml/maps/__init__.py
@@ -2,7 +2,7 @@
Mapping from one domain to another
"""
-from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np
+from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np, linkml_reprs
from nwb_linkml.maps.map import Map
from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP
@@ -14,4 +14,5 @@ __all__ = [
"Map",
"flat_to_linkml",
"flat_to_np",
+ "linkml_reprs",
]
diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py
index 2497a65..95cb296 100644
--- a/nwb_linkml/src/nwb_linkml/maps/dtype.py
+++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py
@@ -39,6 +39,12 @@ flat_to_linkml = {
Map between the flat data types and the simpler linkml base types
"""
+linkml_reprs = {"numeric": "float | int"}
+"""
+``repr`` fields used in the nwb language elements injected in every namespace
+that give the nwb type a specific representation in the generated pydantic models
+"""
+
flat_to_np = {
"float": float,
"float32": np.float32,
@@ -66,6 +72,26 @@ flat_to_np = {
"isodatetime": np.datetime64,
}
+integer_types = {
+ "long",
+ "int64",
+ "int",
+ "int32",
+ "int16",
+ "short",
+ "int8",
+ "uint",
+ "uint32",
+ "uint16",
+ "uint8",
+ "uint64",
+}
+
+float_types = {"float", "float32", "double", "float64", "numeric"}
+
+string_types = {"text", "utf", "utf8", "utf_8", "ascii"}
+
+
np_to_python = {
Any: Any,
np.number: float,
diff --git a/nwb_linkml/src/nwb_linkml/maps/quantity.py b/nwb_linkml/src/nwb_linkml/maps/quantity.py
index 8980076..7ae870a 100644
--- a/nwb_linkml/src/nwb_linkml/maps/quantity.py
+++ b/nwb_linkml/src/nwb_linkml/maps/quantity.py
@@ -9,10 +9,16 @@ We will handle cardinality of array dimensions elsewhere
"""
QUANTITY_MAP = {
- "*": {"required": False, "multivalued": True},
+ "*": {"required": None, "multivalued": True},
"+": {"required": True, "multivalued": True},
- "?": {"required": False, "multivalued": False},
- 1: {"required": True, "multivalued": False},
+ "?": {"required": None, "multivalued": None},
+ 1: {"required": True, "multivalued": None},
# include the NoneType for indexing
None: {"required": None, "multivalued": None},
}
+"""
+Map between NWB quantity values and linkml quantity metaslot values.
+
+Use ``None`` for the defaults (``required: False``, ``multivalued: False``) rather than ``False``,
+so those metaslots are omitted from the generated slots instead of being set explicitly
+"""
diff --git a/nwb_linkml/src/nwb_linkml/plot.py b/nwb_linkml/src/nwb_linkml/plot.py
index e4cb4c9..e08f536 100644
--- a/nwb_linkml/src/nwb_linkml/plot.py
+++ b/nwb_linkml/src/nwb_linkml/plot.py
@@ -85,7 +85,7 @@ def make_node(
def make_graph(namespaces: "NamespacesAdapter", recurse: bool = True) -> List[CytoElement]:
- namespaces.populate_imports()
+ namespaces.complete_namespaces()
nodes = []
element: Namespace | Group | Dataset
print("walking graph")
diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py
index fe8dec5..c106389 100644
--- a/nwb_linkml/src/nwb_linkml/providers/linkml.py
+++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py
@@ -127,7 +127,7 @@ class LinkMLProvider(Provider):
for schema_needs in adapter.needed_imports.values():
for needed in schema_needs:
adapter.imported.append(ns_adapters[needed])
- adapter.populate_imports()
+ adapter.complete_namespaces()
# then do the build
res = {}
diff --git a/nwb_linkml/tests/test_adapters/test_adapter.py b/nwb_linkml/tests/test_adapters/test_adapter.py
index 4514f5d..b3fdb27 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter.py
@@ -54,7 +54,7 @@ def test_walk_field_values(nwb_core_fixture):
text_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, "dtype", value="text"))
assert all([d.dtype == "text" for d in text_models])
# 135 known value from regex search
- assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 135
+ assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 155
def test_build_result(linkml_schema_bare):
diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
index bbcb739..8af60e6 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
@@ -1,6 +1,9 @@
+from pathlib import Path
+
import pytest
-from nwb_linkml.adapters import SchemaAdapter
+from nwb_linkml.adapters import NamespacesAdapter, SchemaAdapter
+from nwb_schema_language import Attribute, Dataset, FlatDtype, Group, Namespace, Namespaces, Schema
@pytest.mark.parametrize(
@@ -19,7 +22,7 @@ def test_find_type_source(nwb_core_fixture, class_name, schema_file, namespace_n
def test_populate_imports(nwb_core_fixture):
- nwb_core_fixture.populate_imports()
+ nwb_core_fixture._populate_imports()
schema: SchemaAdapter
assert len(nwb_core_fixture.schemas) > 0
for schema in nwb_core_fixture.schemas:
@@ -48,15 +51,109 @@ def test_skip_imports(nwb_core_fixture):
assert all([ns == "core" for ns in namespaces])
-@pytest.mark.skip()
-def test_populate_inheritance(nwb_core_fixture):
+def test_roll_down_inheritance():
"""
Classes should receive and override the properties of their parents
when they have neurodata_type_inc
- Args:
- nwb_core_fixture:
-
- Returns:
-
"""
- pass
+ parent_cls = Group(
+ neurodata_type_def="Parent",
+ doc="parent",
+ attributes=[
+ Attribute(name="a", dims=["a", "b"], shape=[1, 2], doc="a", value="a"),
+ Attribute(name="b", dims=["c", "d"], shape=[3, 4], doc="b", value="b"),
+ ],
+ datasets=[
+ Dataset(
+ name="data",
+ dims=["a", "b"],
+ shape=[1, 2],
+ doc="data",
+ attributes=[
+ Attribute(name="c", dtype=FlatDtype.int32, doc="c"),
+ Attribute(name="d", dtype=FlatDtype.int32, doc="d"),
+ ],
+ )
+ ],
+ )
+ parent_sch = Schema(source="parent.yaml")
+ parent_ns = Namespaces(
+ namespaces=[
+ Namespace(
+ author="hey",
+ contact="sup",
+ name="parent",
+ doc="a parent",
+ version="1",
+ schema=[parent_sch],
+ )
+ ]
+ )
+
+ child_cls = Group(
+ neurodata_type_def="Child",
+ neurodata_type_inc="Parent",
+ doc="child",
+ attributes=[Attribute(name="a", doc="a", value="z")],
+ datasets=[
+ Dataset(
+ name="data",
+ doc="data again",
+ attributes=[Attribute(name="c", doc="c", value="z"), Attribute(name="e", doc="e")],
+ ),
+ ],
+ groups=[Group(name="untyped_child", neurodata_type_inc="Parent", doc="untyped child")],
+ )
+ child_sch = Schema(source="child.yaml")
+ child_ns = Namespaces(
+ namespaces=[
+ Namespace(
+ author="hey",
+ contact="sup",
+ name="child",
+ doc="a child",
+ version="1",
+ schema=[child_sch, Schema(namespace="parent")],
+ )
+ ]
+ )
+
+ parent_schema_adapter = SchemaAdapter(path=Path("parent.yaml"), groups=[parent_cls])
+ parent_ns_adapter = NamespacesAdapter(namespaces=parent_ns, schemas=[parent_schema_adapter])
+ child_schema_adapter = SchemaAdapter(path=Path("child.yaml"), groups=[child_cls])
+ child_ns_adapter = NamespacesAdapter(
+ namespaces=child_ns, schemas=[child_schema_adapter], imported=[parent_ns_adapter]
+ )
+
+ child_ns_adapter.complete_namespaces()
+
+ child = child_ns_adapter.get("Child")
+ # overrides simple attrs
+ assert child.doc == "child"
+ # we don't receive attrs that aren't overridden in the child,
+ # instead we let python/linkml inheritance handle that for us
+ assert "b" not in [attr.name for attr in child.attributes]
+ # overrides values while preserving remaining values when set
+ attr_a = [attr for attr in child.attributes if attr.name == "a"][0]
+ assert attr_a.value == "z"
+ assert attr_a.dims == parent_cls.attributes[0].dims
+ assert [attr.value for attr in child.attributes if attr.name == "a"][0] == "z"
+
+ # preserve unset values in child datasets
+ assert child.datasets[0].dtype == parent_cls.datasets[0].dtype
+ assert child.datasets[0].dims == parent_cls.datasets[0].dims
+ # we *do* get undeclared attrs in child datasets,
+ # since those are not handled by python/linkml inheritance
+ assert "d" in [attr.name for attr in child.datasets[0].attributes]
+ # overrides set values in child datasets while preserving unset
+ c_attr = [attr for attr in child.datasets[0].attributes if attr.name == "c"][0]
+ assert c_attr.value == "z"
+ assert c_attr.dtype == FlatDtype.int32
+ # preserves new attrs
+ assert "e" in [attr.name for attr in child.datasets[0].attributes]
+
+ # neurodata_type_def is not included in untyped children
+ assert child.groups[0].neurodata_type_def is None
+ # we don't set any of the attrs from the parent class here because we don't override them,
+ # so we don't need to merge them, and we don't want to clutter our linkml models unnecessarily
+ assert child.groups[0].attributes is None
diff --git a/nwb_linkml/tests/test_includes/conftest.py b/nwb_linkml/tests/test_includes/conftest.py
index 53e3a39..1a801ae 100644
--- a/nwb_linkml/tests/test_includes/conftest.py
+++ b/nwb_linkml/tests/test_includes/conftest.py
@@ -114,14 +114,14 @@ def _icephys_stimulus_and_response(
n_samples = generator.integers(20, 50)
stimulus = VoltageClampStimulusSeries(
name=f"vcss_{i}",
- data=VoltageClampStimulusSeriesData(value=[i] * n_samples),
+ data=VoltageClampStimulusSeriesData(value=np.array([i] * n_samples, dtype=float)),
stimulus_description=f"{i}",
sweep_number=i,
electrode=electrode,
)
response = VoltageClampSeries(
name=f"vcs_{i}",
- data=VoltageClampSeriesData(value=[i] * n_samples),
+ data=VoltageClampSeriesData(value=np.array([i] * n_samples, dtype=float)),
stimulus_description=f"{i}",
electrode=electrode,
)
diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index a8b14b7..349a93f 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -149,8 +149,8 @@ def test_dynamictable_mixin_colnames_index():
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(10)),
- "new_col_2": hdmf.VectorData(value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
# explicit index with mismatching name
cols["weirdname_index"] = VectorIndexMixin(value=np.arange(10), target=cols["new_col_1"])
@@ -171,9 +171,9 @@ def test_dynamictable_mixin_colnames_ordered():
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(10)),
- "new_col_2": hdmf.VectorData(value=np.arange(10)),
- "new_col_3": hdmf.VectorData(value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
+ "new_col_3": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
order = ["new_col_2", "existing_col", "new_col_1", "new_col_3"]
@@ -198,7 +198,7 @@ def test_dynamictable_mixin_getattr():
class MyDT(DynamicTableMixin):
existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
- col = hdmf.VectorData(value=np.arange(10))
+ col = hdmf.VectorData(name="existing_col", description="", value=np.arange(10))
inst = MyDT(existing_col=col)
# regular lookup for attrs that exist
@@ -257,13 +257,17 @@ def test_dynamictable_resolve_index():
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(10)),
- "new_col_2": hdmf.VectorData(value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
# explicit index with mismatching name
- cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"])
+ cols["weirdname_index"] = hdmf.VectorIndex(
+ name="weirdname_index", description="", value=np.arange(10), target=cols["new_col_1"]
+ )
# implicit index with matching name
- cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10))
+ cols["new_col_2_index"] = hdmf.VectorIndex(
+ name="new_col_2_index", description="", value=np.arange(10)
+ )
inst = MyDT(**cols)
assert inst.weirdname_index.target is inst.new_col_1
@@ -282,14 +286,14 @@ def test_dynamictable_assert_equal_length():
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(11)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(11)),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
cols = {
"existing_col": np.arange(11),
- "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
@@ -297,16 +301,20 @@ def test_dynamictable_assert_equal_length():
# wrong lengths are fine as long as the index is good
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(100)),
- "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 10) + 10),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)),
+ "new_col_1_index": hdmf.VectorIndex(
+ name="new_col_1_index", description="", value=np.arange(0, 100, 10) + 10
+ ),
}
_ = MyDT(**cols)
# but not fine if the index is not good
cols = {
"existing_col": np.arange(10),
- "new_col_1": hdmf.VectorData(value=np.arange(100)),
- "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)),
+ "new_col_1_index": hdmf.VectorIndex(
+ name="new_col_1_index", description="", value=np.arange(0, 100, 5) + 5
+ ),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
@@ -321,8 +329,8 @@ def test_dynamictable_setattr():
existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
cols = {
- "existing_col": hdmf.VectorData(value=np.arange(10)),
- "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ "existing_col": hdmf.VectorData(name="existing_col", description="", value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
}
inst = MyDT(existing_col=cols["existing_col"])
assert inst.colnames == ["existing_col"]
@@ -335,7 +343,7 @@ def test_dynamictable_setattr():
# model validators should be called to ensure equal length
with pytest.raises(ValidationError):
- inst.new_col_2 = hdmf.VectorData(value=np.arange(11))
+ inst.new_col_2 = hdmf.VectorData(name="new_col_2", description="", value=np.arange(11))
def test_vectordata_indexing():
@@ -346,7 +354,7 @@ def test_vectordata_indexing():
value_array, index_array = _ragged_array(n_rows)
value_array = np.concatenate(value_array)
- data = hdmf.VectorData(value=value_array)
+ data = hdmf.VectorData(name="data", description="", value=value_array)
# before we have an index, things should work as normal, indexing a 1D array
assert data[0] == 0
@@ -356,7 +364,7 @@ def test_vectordata_indexing():
data[0] = 0
# indexes by themselves are the same
- index_notarget = hdmf.VectorIndex(value=index_array)
+ index_notarget = hdmf.VectorIndex(name="no_target_index", description="", value=index_array)
assert index_notarget[0] == index_array[0]
assert all(index_notarget[0:3] == index_array[0:3])
oldval = index_array[0]
@@ -364,7 +372,7 @@ def test_vectordata_indexing():
assert index_notarget[0] == 5
index_notarget[0] = oldval
- index = hdmf.VectorIndex(value=index_array, target=data)
+ index = hdmf.VectorIndex(name="data_index", description="", value=index_array, target=data)
data._index = index
# after an index, both objects should index raggedly
@@ -396,8 +404,10 @@ def test_vectordata_getattr():
"""
VectorData and VectorIndex both forward getattr to ``value``
"""
- data = hdmf.VectorData(value=np.arange(100))
- index = hdmf.VectorIndex(value=np.arange(10, 101, 10), target=data)
+ data = hdmf.VectorData(name="data", description="", value=np.arange(100))
+ index = hdmf.VectorIndex(
+ name="data_index", description="", value=np.arange(10, 101, 10), target=data
+ )
# get attrs that we defined on the models
# i.e. no attribute errors here
@@ -447,7 +457,9 @@ def test_dynamictable_region_indexing(basic_table):
index = np.array([9, 4, 8, 3, 7, 2, 6, 1, 5, 0])
- table_region = hdmf.DynamicTableRegion(value=index, table=inst)
+ table_region = hdmf.DynamicTableRegion(
+ name="table_region", description="", value=index, table=inst
+ )
row = table_region[1]
assert all(row.iloc[0] == index[1])
@@ -499,10 +511,14 @@ def test_dynamictable_region_ragged():
timeseries_index=spike_idx,
)
region = hdmf.DynamicTableRegion(
+ name="region",
+ description="a table region what else would it be",
table=table,
value=value,
)
- index = hdmf.VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx)
+ index = hdmf.VectorIndex(
+ name="region_index", description="hgggggggjjjj", target=region, value=idx
+ )
region._index = index
rows = region[1]
@@ -594,8 +610,8 @@ def test_mixed_aligned_dynamictable(aligned_table):
value_array, index_array = _ragged_array(10)
value_array = np.concatenate(value_array)
- data = hdmf.VectorData(value=value_array)
- index = hdmf.VectorIndex(value=index_array)
+ data = hdmf.VectorData(name="data", description="", value=value_array)
+ index = hdmf.VectorIndex(name="data_index", description="", value=index_array)
atable = AlignedTable(**cols, extra_col=data, extra_col_index=index)
atable[0]
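
Throughout this file the pattern is the same: `VectorData`, `VectorIndex`, and `DynamicTableRegion` are now constructed with explicit `name` and `description` arguments, since the stricter models no longer fill in missing required slots. A condensed usage sketch of the new construction pattern (the `hdmf` import path is an assumption based on how the test module refers to it):

```python
# Condensed sketch of the new construction pattern; import path assumed.
import numpy as np
from nwb_linkml.includes import hdmf  # assumption: module available as ``hdmf`` in tests

data = hdmf.VectorData(name="data", description="", value=np.arange(100))
# a "good" index ends exactly at len(value): here 10 rows of 10 items each
index = hdmf.VectorIndex(
    name="data_index", description="", value=np.arange(10, 101, 10), target=data
)
data._index = index
row = data[0]  # ragged access: returns the first 10 items
```

This also explains the equal-length tests above: a mismatched column length passes only when its index is "good", i.e. the index has one entry per row and its last entry equals the length of the underlying value array.
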
diff --git a/nwb_linkml/tests/test_io/test_io_nwb.py b/nwb_linkml/tests/test_io/test_io_nwb.py
index 1ad51ed..32a50d1 100644
--- a/nwb_linkml/tests/test_io/test_io_nwb.py
+++ b/nwb_linkml/tests/test_io/test_io_nwb.py
@@ -80,7 +80,7 @@ def test_position(read_nwbfile, read_pynwb):
py_trials = read_pynwb.trials.to_dataframe()
pd.testing.assert_frame_equal(py_trials, trials)
- spatial = read_nwbfile.processing["behavior"].Position.SpatialSeries
+ spatial = read_nwbfile.processing["behavior"]["Position"]["SpatialSeries"]
py_spatial = read_pynwb.processing["behavior"]["Position"]["SpatialSeries"]
_compare_attrs(spatial, py_spatial)
assert np.array_equal(spatial[:], py_spatial.data[:])
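
The read-side access switches from attribute traversal to string `__getitem__` lookups, matching pynwb's mapping-style API; this is what the `Union[int, slice, str]` widening in the generated models below enables. A hypothetical illustration (`nwbfile` stands in for the fixture):

```python
# int/slice indexes into the wrapped array, str descends into named children
series = nwbfile.processing["behavior"]["Position"]["SpatialSeries"]  # str keys
first_samples = series[0:10]  # slice forwards to the underlying data array
```
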
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py
index 263d389..b1c670c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_0.hdmf_common_table import Container, Data, DynamicTable
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
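
This validator stack, regenerated verbatim into every model module that follows, is the heart of the change: `extra` flips from `allow` to `forbid`, and two rescue paths keep old call sites working. `cast_with_value` retries a failing field as `{"value": v}`, and `gather_extra_to_value` packs unknown kwargs into the `value` dict before validation. A minimal standalone sketch of the combined behavior, assuming pydantic v2 (the `Wrapper` model is hypothetical, not a generated class):

```python
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator


class Wrapper(BaseModel):
    # hypothetical stand-in for a generated ConfiguredBaseModel subclass
    model_config = ConfigDict(extra="forbid")
    value: Optional[dict] = None

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # try the value as-is; on failure, retry it wrapped as {"value": v}
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # pack unknown kwargs into ``value`` instead of letting "forbid" reject them
        if isinstance(v, dict) and "value" in cls.model_fields:
            extras = {k: val for k, val in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                if "value" in v and isinstance(v["value"], dict):
                    v["value"].update(extras)
                else:
                    v["value"] = extras
        return v


w = Wrapper(a=1, b=2)  # would raise with plain extra="forbid"
assert w.value == {"a": 1, "b": 2}
```

The `coerce_subclass` guard added above matters for the same reason: with `extra="forbid"`, `__pydantic_extra__` is `None` rather than a dict, so it can no longer be splatted unconditionally into the subclass constructor.
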
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py
index 5691dab..5a63426 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py
index ab24817..bdd469e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
index 136ec40..a3d315d 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py
index 4ab3c01..2a4db9b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py
index ae16391..5cc40db 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBContainer,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py
index 439d5af..e4213de 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py
index 33784d6..a42cad3 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py
index e8a4896..021de7b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py
index 998dda0..44f7e6a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py
index 70db9d7..eda0cf9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
index 17edeec..3031e3c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_0.core_nwb_base import NWBData, NWBDataInterface
@@ -31,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -65,6 +66,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -75,12 +88,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py
index d4b265d..805e58c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
Image,
@@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -193,12 +205,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py
index f0f43be..044db0d 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_2.hdmf_common_table import Container, Data, DynamicTable
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py
index e96918c..aaf0f41 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py
index 80de9c0..3ad7dd1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
index 169dd5e..4d16d36 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
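
The companion ``cast_with_value`` wrap validator added above handles the inverse direction: a raw payload that fails field validation is retried as ``{"value": payload}``, so scalar data can still populate a value-slotted child model. Minimal sketch, assuming pydantic v2; ``Scalar`` and ``Parent`` are hypothetical stand-ins for the generated classes:

from typing import Any, Optional

from pydantic import BaseModel, field_validator


class Scalar(BaseModel):
    value: Optional[int] = None


class Parent(BaseModel):
    data: Scalar

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        try:
            return handler(v)
        except Exception as e1:
            try:
                # Retry with the payload stuffed into the value slot.
                return handler({"value": v})
            except Exception:
                # Surface the original failure, not the retry's.
                raise e1


p = Parent(data=5)  # 5 is not a Scalar, but {"value": 5} is
assert p.data.value == 5
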
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py
index ed1353e..ce0dbfd 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
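
The ``coerce_subclass`` hunk above exists because pydantic v2 stores ``__pydantic_extra__`` as ``None`` (not an empty dict) when ``extra="forbid"`` is set, so the old unconditional ``{**v.__dict__, **v.__pydantic_extra__}`` merge would raise ``TypeError``. Illustrative sketch of the guarded upcast with hypothetical ``Base``/``Child`` models:

from pydantic import BaseModel, ConfigDict


class Base(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str = ""


class Child(Base):
    rate: float = 0.0


def upcast(v: Base, annotation: type) -> Base:
    """Re-instantiate v as the narrower subclass a field expects."""
    if issubclass(annotation, type(v)) and annotation is not type(v):
        if v.__pydantic_extra__:  # a dict of extras, or None under "forbid"
            v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
        else:
            v = annotation(**v.__dict__)
    return v


child = upcast(Base(name="probe"), Child)
assert isinstance(child, Child) and child.name == "probe"
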
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py
index b5a0b9b..49a1846 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBContainer,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
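
Widening ``__getitem__`` to accept ``str`` matches the new packing behavior: extras now land in a ``value`` dict, so string keys must be forwardable alongside the existing int/slice indexing. Sketch with a hypothetical ``Container``:

from typing import Any, Dict, Optional, Union

from pydantic import BaseModel


class Container(BaseModel):
    value: Optional[Dict[str, Any]] = None

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        # Forward indexing to the underlying value container, as the
        # generated base models do.
        if self.value is not None:
            return self.value[val]
        raise KeyError(val)


c = Container(value={"series_1": [1, 2, 3]})
assert c["series_1"] == [1, 2, 3]
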
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py
index 991c1e8..19b2f81 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py
index 52c10a5..990cc6a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py
index 19a036f..2249720 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py
index 609baf0..64649f4 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py
index a951c51..deecf64 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
index 1c6f4ad..dd8a84e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_1.core_nwb_base import NWBData, NWBDataInterface
@@ -31,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -65,6 +66,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -75,12 +88,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py
index 7f2ade1..cbab2e1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
Image,
@@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -193,12 +205,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
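
Taken together across every generated version and namespace, the switch to ``extra="forbid"`` means a typo'd kwarg on a model without a ``value`` slot now fails loudly instead of being silently attached as an extra attribute. Sketch of the new failure mode, assuming pydantic v2:

from pydantic import BaseModel, ConfigDict, ValidationError


class Strict(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


try:
    Strict(name="dev0", serial_numbr="x1")  # note the typo'd kwarg
except ValidationError as e:
    print(e.errors()[0]["type"])  # -> "extra_forbidden"
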
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py
index 956e37d..bce6112 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py
index 271fceb..1536adb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py
index 28aa954..de4bd04 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
index 9664726..22e0ff7 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py
index c12a965..d599a8c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_2.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py
index ec66471..8b5d38b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBContainer,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py
index 9b7729d..464dcc8 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py
index 6e805b1..b88c1b1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py
index d80af52..e12bfb2 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_2.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
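
`gather_extra_to_value` is the before-mode counterpart to the config flip from `extra="allow"` to `extra="forbid"` seen in every file here: unknown kwargs would now be rejected outright, so on classes that expose a `value` slot they are folded into it before field validation runs. A standalone sketch of the same logic under a hypothetical `Packed` model; like the generated validator, it assumes any pre-existing `"value"` entry is already a dict:

    from typing import Any, Dict, Optional

    from pydantic import BaseModel, ConfigDict, model_validator


    class Packed(BaseModel):
        model_config = ConfigDict(extra="forbid")

        name: str
        value: Optional[Dict[str, Any]] = None

        @model_validator(mode="before")
        @classmethod
        def gather_extra_to_value(cls, v: Any) -> Any:
            """Pack unknown kwargs into ``value`` instead of tripping extra="forbid"."""
            if (
                cls.model_config["extra"] == "forbid"
                and "value" in cls.model_fields
                and isinstance(v, dict)
            ):
                extras = {key: val for key, val in v.items() if key not in cls.model_fields}
                if extras:
                    for key in extras:
                        del v[key]
                    if "value" in v:
                        v["value"].update(extras)  # assumes an existing dict here
                    else:
                        v["value"] = extras
            return v


    m = Packed(name="unit_0", a=1, b=2)  # a/b are not declared fields
    assert m.value == {"a": 1, "b": 2}

Because this runs in `mode="before"`, the extras are moved out of the input dict before pydantic ever applies the `extra="forbid"` check.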
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py
index debdaf9..7bdc063 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
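
The repeated `coerce_subclass` tweak guards the upcast against `__pydantic_extra__` being empty or `None`. Under `extra="forbid"` pydantic leaves `__pydantic_extra__` as `None`, and unpacking `None` with `**` raises `TypeError`, so the old one-liner would have broken once the config changed. A small demonstration with hypothetical `Parent`/`Child` models:

    from pydantic import BaseModel, ConfigDict


    class Parent(BaseModel):
        model_config = ConfigDict(extra="forbid")
        name: str


    class Child(Parent):
        pass


    p = Parent(name="x")
    assert p.__pydantic_extra__ is None  # None under extra="forbid", a dict under extra="allow"

    # {**p.__dict__, **p.__pydantic_extra__} would raise TypeError here, hence the branch:
    if p.__pydantic_extra__:
        kwargs = {**p.__dict__, **p.__pydantic_extra__}
    else:
        kwargs = dict(p.__dict__)

    child = Child(**kwargs)  # upcast to the annotated subclass
    assert isinstance(child, Child) and child.name == "x"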
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py
index e7b56da..2ac1358 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
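
Widening `__getitem__` from `Union[int, slice]` to `Union[int, slice, str]` complements the packing behaviour: once stray fields live inside a `value` dict, string keys need to route through the same indexing path that already serves integer and slice access into array data. A minimal sketch with a hypothetical `Wrapper` model:

    from typing import Any, Dict, Optional, Union

    from pydantic import BaseModel


    class Wrapper(BaseModel):
        value: Optional[Dict[str, Any]] = None

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            """Delegate indexing to ``value`` when present, mirroring the generated models."""
            if self.value is not None:
                return self.value[val]
            raise KeyError(val)


    w = Wrapper(value={"spike_times": [0.1, 0.2]})
    assert w["spike_times"] == [0.1, 0.2]  # str key, newly allowed by the signature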
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
index bfa2ad5..577cd33 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
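
Putting the pieces together, the net effect of these per-file changes is that extra kwargs survive the stricter config: they are packed into `value` at construction time and stay reachable by string indexing. An end-to-end sketch combining the two behaviours, again with hypothetical names:

    from typing import Any, Dict, Optional, Union

    from pydantic import BaseModel, ConfigDict, model_validator


    class Module(BaseModel):
        model_config = ConfigDict(extra="forbid")

        name: str
        value: Optional[Dict[str, Any]] = None

        @model_validator(mode="before")
        @classmethod
        def gather_extra_to_value(cls, v: Any) -> Any:
            # fold unknown kwargs into "value" so extra="forbid" does not reject them
            if isinstance(v, dict):
                extras = {k: val for k, val in v.items() if k not in cls.model_fields}
                for k in extras:
                    del v[k]
                if extras:
                    v.setdefault("value", {}).update(extras)
            return v

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            # delegate indexing to the value dict, as the generated models do
            if self.value is not None:
                return self.value[val]
            raise KeyError(val)


    mod = Module(name="mod0", child_a=1, child_b=2)  # child_* are not declared fields
    assert mod.value == {"child_a": 1, "child_b": 2}
    assert mod["child_a"] == 1  # string indexing reaches the packed extras

The remaining files in this diff receive the identical set of changes, since each generated module carries its own copy of `ConfiguredBaseModel`.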
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py
index 9ba793b..9dbeca9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
Image,
@@ -152,7 +152,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -162,7 +162,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -173,7 +173,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -186,6 +186,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -196,12 +208,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py
index 0e81486..8c5293b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py
index 42613b4..0775e8f 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py
index 1aeeb6c..63b8926 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
index d4f5172..0db2758 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py
index 61f894b..7ed9c06 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_4.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py
index 9167a4d..32f00ff 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBContainer,
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -85,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py
index 8067eb7..0a4bf27 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py
index 05c1d6e..f0fb808 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py
index 5ff807c..65b6dc0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_4.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py
index 20f6353..af170bd 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py
index b91e448..91a59a0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
index 362bc59..ba2cc17 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py
index 23ec3dd..0950468 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
Image,
@@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -203,12 +215,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py
index 86fe03f..36e9a95 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
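
`cast_with_value` is the field-level counterpart: a `mode="wrap"` validator first lets normal validation run via `handler`, and only on failure retries with the raw input wrapped as `{"value": v}`, so a bare scalar or array can still populate a value-holding submodel. A self-contained sketch under illustrative model names:

from typing import Any, List, Optional

from pydantic import BaseModel, field_validator


class Inner(BaseModel):
    value: Optional[List[int]] = None


class Outer(BaseModel):
    data: Inner

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # First attempt: validate the input as-is.
        try:
            return handler(v)
        except Exception as e1:
            # Second attempt: treat the input as the submodel's "value" slot.
            try:
                return handler({"value": v})
            except Exception:
                raise e1


# A bare list is rescued by wrapping it into {"value": [...]}:
o = Outer(data=[1, 2, 3])
assert o.data.value == [1, 2, 3]
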
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py
index f4f5e96..f15481e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
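
The `coerce_subclass` change is a direct consequence of `extra="forbid"`: on a model that forbids extras, `__pydantic_extra__` is `None` rather than a dict, so the old `{**v.__dict__, **v.__pydantic_extra__}` would raise `TypeError` and the surrounding `except TypeError` would silently skip the upcast. The new branch only merges extras when they exist. Roughly, with hypothetical `Base`/`Child` models:

from pydantic import BaseModel, ConfigDict


class Base(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


class Child(Base):
    pass


b = Base(name="x")
# extra="forbid" leaves __pydantic_extra__ as None, not {}:
assert b.__pydantic_extra__ is None

# So the upcast must guard before unpacking it:
kwargs = {**b.__dict__, **b.__pydantic_extra__} if b.__pydantic_extra__ else dict(b.__dict__)
child = Child(**kwargs)
assert isinstance(child, Child) and child.name == "x"
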
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py
index 5abfc5d..124d12d 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
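
Widening `__getitem__` to accept `str` makes value-holding containers behave like mappings as well as sequences, which matters now that named children live in the `value` dict rather than as extra attributes. A sketch of the accessor in isolation (the final `KeyError` fallback is an assumption of this sketch, the hunks above truncate before the error branch):

from typing import Any, Union

from pydantic import BaseModel


class Indexed(BaseModel):
    value: Any = None
    data: Any = None

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        """Try to get an item from "value", else from "data"."""
        if self.value is not None:
            return self.value[val]
        if self.data is not None:
            return self.data[val]
        raise KeyError(val)  # fallback behavior assumed for this sketch


m = Indexed(value={"series_a": [1, 2, 3]})
assert m["series_a"] == [1, 2, 3]  # str key into the value dict
n = Indexed(data=[10, 20, 30])
assert n[1] == 20  # int index into data
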
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
index 48d2503..39d0aad 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py
index 6a8ba5a..d610d4a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_5.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py
index 59aa79e..fdcd4c9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBContainer,
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -85,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py
index ee68bff..3c878c5 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py
index f3d0d5f..79f339a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py
index 5faeb05..969c646 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_5.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py
index 6c81182..763c7f3 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py
index 98c3a53..b106054 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
index 5466646..2be5f1a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py
index 5d12f36..849cf0b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
Image,
@@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -203,12 +215,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py
index ad3c5f4..001d53e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable
@@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -67,12 +79,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -138,9 +175,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
- NDArray[Shape["* x, * y"], float],
- NDArray[Shape["* x, * y, 3 r_g_b"], float],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
+ NDArray[Shape["* x, * y"], float | int],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@@ -305,13 +342,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of this collection of processed data."""
+ )
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
- name: str = Field(...)
class Images(NWBDataInterface):
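
Widening the `NDArray` dtype from `float` to `float | int` lets integer-typed image data (common in raw acquisitions) validate without a copy or cast. A small sketch of the behavior this relies on, assuming the dtype-union support in numpydantic 1.6 that the lockfile bump brings in (`GrayscaleImage` here is an illustrative stand-in, not the generated class):

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class GrayscaleImage(BaseModel):
    value: NDArray[Shape["* x, * y"], float | int]


# Both float and int arrays should validate against the widened dtype union:
GrayscaleImage(value=np.zeros((4, 4), dtype=np.float64))
GrayscaleImage(value=np.zeros((4, 4), dtype=np.int32))
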
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py
index 8358db6..b689f74 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -176,6 +213,20 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@@ -183,8 +234,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -198,10 +249,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEpochs",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
+ )
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
- name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@@ -213,10 +267,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEvents",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@@ -228,10 +285,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralTimeSeries",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class PupilTracking(NWBDataInterface):
@@ -243,10 +303,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class EyeTracking(NWBDataInterface):
@@ -258,10 +320,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class CompassDirection(NWBDataInterface):
@@ -273,10 +337,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "CompassDirection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class Position(NWBDataInterface):
@@ -288,10 +355,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
# Model rebuild
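
The `ifabsent` defaults above mean the behavioral interface containers no longer require an explicit `name`: the class name fills in when absent but can still be overridden. Sketched with plain pydantic (the `json_schema_extra` entry carries LinkML provenance and does not affect validation):

from typing import Any, Dict, Optional

from pydantic import BaseModel, Field


class Position(BaseModel):
    name: str = Field(
        "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
    )
    value: Optional[Dict[str, Any]] = None


assert Position().name == "Position"  # default supplied by ifabsent
assert Position(name="position_2").name == "position_2"  # still overridable
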
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py
index 5c0f451..791d2a1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
index 2676bd5..b9d7e8e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_3_0.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
- data: Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_channels"], float],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Recorded voltage data.""")
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
+ data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -215,6 +249,45 @@ class ElectricalSeries(TimeSeries):
)
+class ElectricalSeriesData(ConfiguredBaseModel):
+ """
+ Recorded voltage data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and 'channel_conversion' (if present).""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_channels"], float | int],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
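
`ElectricalSeries.data` changes from a bare `NDArray` union to the dedicated `ElectricalSeriesData` companion model, which keeps the per-dataset attributes (`unit`, `conversion`, `resolution`, `continuity`) next to the array in `value`; combined with `cast_with_value`, a bare array passed to `data` should still be rescued into the submodel. A hedged usage sketch, assuming the import path mirrors the file layout above:

```python
import numpy as np

# Assumed import path, mirroring the file layout in this diff.
from nwb_models.models.pydantic.core.v2_3_0.core_nwb_ecephys import (
    ElectricalSeriesData,
)

# Dataset-level attributes now live next to the array instead of on the parent.
data = ElectricalSeriesData(
    conversion=9.5367e-9,
    value=np.zeros((100, 8)),  # (num_times, num_channels); int dtypes also pass
)
assert data.unit == "volts"  # fixed by the schema, filled in by ifabsent
```
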
@@ -225,10 +298,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_events, * num_samples"], float],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Spike waveforms.""")
+ data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@@ -238,6 +308,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -250,11 +325,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -287,6 +357,44 @@ class SpikeEventSeries(ElectricalSeries):
)
+class SpikeEventSeriesData(ConfiguredBaseModel):
+ """
+ Spike waveforms.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_events, * num_samples"], float | int],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@@ -385,10 +493,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
+ )
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
- name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@@ -400,10 +510,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
+ )
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class LFP(NWBDataInterface):
@@ -415,10 +527,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class ElectrodeGroup(NWBContainer):
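
`EventWaveform`, `FilteredEphys`, and `LFP` previously required an explicit `name` (`Field(...)`); each now defaults to its own class name via `ifabsent`. A stand-in sketch of just the changed field:

```python
from pydantic import BaseModel, Field


class LFPSketch(BaseModel):
    """Stand-in showing only the changed ``name`` default."""

    # was: name: str = Field(...)  (required at construction time)
    name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})


assert LFPSketch().name == "LFP"
```
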
@@ -561,7 +673,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
+ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
+SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py
index 93ea1ba..265974b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_3_0.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py
index d692065..841bfb6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBContainer,
@@ -25,7 +25,12 @@ from ...core.v2_3_0.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_3_0.core_nwb_misc import Units
from ...core.v2_3_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_3_0.core_nwb_ophys import ImagingPlane
-from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
+from ...hdmf_common.v1_5_0.hdmf_common_table import (
+ DynamicTable,
+ ElementIdentifiers,
+ VectorData,
+ VectorIndex,
+)
metamodel_version = "None"
@@ -36,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -222,6 +264,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
+ specifications: Optional[dict] = Field(
+ None, description="""Nested dictionary of schema specifications"""
+ )
class NWBFileStimulus(ConfiguredBaseModel):
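
`NWBFile` also gains an optional `specifications` slot for the schema sources a file was written with (the cached `/specifications` group). The slot is an untyped `Optional[dict]`, so any nesting validates; a namespace → version → schema-source layout is one plausible shape, shown here for illustration only:

```python
# Illustrative only: the slot is an untyped dict, so structure is by convention.
specifications = {
    "core": {"2.3.0": {"namespace": "...", "nwb.ecephys": "..."}},
    "hdmf-common": {"1.5.0": {"namespace": "..."}},
}
```
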
@@ -320,10 +365,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
- lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
- None,
- description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
- )
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@@ -349,6 +390,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
+ value: Optional[Dict[str, LabMetaData]] = Field(
+ None,
+ description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
+ )
class GeneralSourceScript(ConfiguredBaseModel):
@@ -384,12 +429,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
- electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
- None, description="""Physical group of electrodes."""
- )
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
+ value: Optional[Dict[str, ElectrodeGroup]] = Field(
+ None, description="""Physical group of electrodes."""
+ )
class ExtracellularEphysElectrodes(DynamicTable):
@@ -545,12 +590,12 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
- intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
- None, description="""An intracellular electrode."""
- )
sweep_table: Optional[SweepTable] = Field(
None, description="""The table which groups different PatchClampSeries together."""
)
+ value: Optional[Dict[str, IntracellularElectrode]] = Field(
+ None, description="""An intracellular electrode."""
+ )
class NWBFileIntervals(ConfiguredBaseModel):
@@ -576,7 +621,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
- time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
+ value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)
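
The same rename repeats through `NWBFileGeneral`, `GeneralExtracellularEphys`, `GeneralIntracellularEphys`, and `NWBFileIntervals`: the free-form `Dict[str, ...]` slots (`lab_meta_data`, `electrode_group`, `intracellular_electrode`, `time_intervals`) all become `value`, so the `gather_extra_to_value` and `__getitem__` machinery applies uniformly. A self-contained sketch of the round trip (hypothetical model; the generated classes carry many more fields):

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel, ConfigDict, model_validator


class Intervals(BaseModel):
    """Hypothetical stand-in for NWBFileIntervals after the rename."""

    model_config = ConfigDict(extra="forbid")

    value: Optional[Dict[str, Any]] = None  # was ``time_intervals``

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # Compact version of the validator added earlier in this diff
        if isinstance(v, dict):
            extras = {k: x for k, x in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                v["value"] = {**(v.get("value") or {}), **extras}
        return v

    def __getitem__(self, key: Union[int, slice, str]) -> Any:
        return self.value[key]


iv = Intervals(lick_epochs="TimeIntervals-0")  # arbitrary child name
assert iv["lick_epochs"] == "TimeIntervals-0"  # read back by name via ``value``
```
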
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py
index 1fb2a04..a6e07ed 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_3_0.core_nwb_base import (
@@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -224,11 +262,25 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- value: Optional[NDArray[Shape["* num_times"], float]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -243,12 +295,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
+ data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@@ -316,12 +368,28 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IZeroClampSeries(CurrentClampSeries):
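
`CurrentClampSeriesData.value` (and its siblings below) tightens from a required but unvalidated `Any` to an optional 1-D array checked by numpydantic, with the dtype widened to `float | int`. A sketch of what the new annotation accepts and rejects:

```python
from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ValidationError


class DataSketch(BaseModel):
    # was: value: Any = Field(...)
    value: Optional[NDArray[Shape["* num_times"], float | int]] = None


DataSketch(value=np.arange(5))          # int dtype now validates
DataSketch(value=np.linspace(0, 1, 5))  # float still validates
try:
    DataSketch(value=np.zeros((2, 2)))  # wrong rank should be rejected
except ValidationError:
    pass
```
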
@@ -476,6 +544,20 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@@ -483,7 +565,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class VoltageClampSeries(PatchClampSeries):
@@ -496,13 +580,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
+ data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@@ -574,27 +658,6 @@ class VoltageClampSeries(PatchClampSeries):
)
-class VoltageClampSeriesData(ConfiguredBaseModel):
- """
- Recorded current.
- """
-
- linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
-
- name: Literal["data"] = Field(
- "data",
- json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
- )
- unit: Literal["amperes"] = Field(
- "amperes",
- description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
- json_schema_extra={
- "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
- },
- )
- value: Any = Field(...)
-
-
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@@ -647,6 +710,43 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
+class VoltageClampSeriesData(ConfiguredBaseModel):
+ """
+ Recorded current.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["amperes"] = Field(
+ "amperes",
+ description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@@ -851,12 +951,28 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IntracellularElectrode(NWBContainer):
@@ -906,15 +1022,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: VectorData[NDArray[Any, int]] = Field(
- ...,
- description="""Sweep number of the PatchClampSeries in that row.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
- )
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@@ -936,6 +1043,15 @@ class SweepTable(DynamicTable):
}
},
)
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
+ ...,
+ description="""Sweep number of the PatchClampSeries in that row.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@@ -958,9 +1074,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
-VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
+VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py
index 8758ca8..cb9f5bf 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
from ...core.v2_3_0.core_nwb_device import Device
@@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -67,12 +79,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -116,7 +153,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@@ -138,7 +175,7 @@ class RGBImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -168,7 +205,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -198,12 +235,9 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
- data: Optional[
- Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, * z"], float],
- ]
- ] = Field(None, description="""Binary data representing images across frames.""")
+ data: Optional[ImageSeriesData] = Field(
+ None, description="""Binary data representing images across frames."""
+ )
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
@@ -214,8 +248,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
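
`ImageSeries.format` (and the overrides in `ImageMaskSeries` and `OpticalSeries` below) now materializes the spec's implicit default: per its own description, an absent `format` means `'raw'`, so the generated default becomes `"raw"` rather than `None`. A stand-in sketch of just this field:

```python
from typing import Optional

from pydantic import BaseModel, Field


class ImageSeriesSketch(BaseModel):
    """Stand-in showing only the changed ``format`` default."""

    format: Optional[str] = Field(
        "raw",  # was None; per the description, an absent format implies 'raw'
        json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
    )


assert ImageSeriesSketch().format == "raw"
```
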
@@ -263,6 +298,43 @@ class ImageSeries(TimeSeries):
)
+class ImageSeriesData(ConfiguredBaseModel):
+ """
+ Binary data representing images across frames.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, * z"], float | int],
+ ]
+ ] = Field(None)
+
+
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
@@ -304,12 +376,9 @@ class ImageMaskSeries(ImageSeries):
}
},
)
- data: Optional[
- Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, * z"], float],
- ]
- ] = Field(None, description="""Binary data representing images across frames.""")
+ data: Optional[ImageSeriesData] = Field(
+ None, description="""Binary data representing images across frames."""
+ )
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
@@ -320,8 +389,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -379,6 +449,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
+ data: OpticalSeriesData = Field(
+ ..., description="""Images presented to subject, either grayscale or RGB"""
+ )
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@@ -387,10 +460,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
- ] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@@ -405,8 +474,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -454,6 +524,43 @@ class OpticalSeries(ImageSeries):
)
+class OpticalSeriesData(ConfiguredBaseModel):
+ """
+ Images presented to subject, either grayscale or RGB
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
+ ]
+ ] = Field(None)
+
+
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the ImageIndexSeries is to allow a static image stack to be stored somewhere, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced ImageSeries, and the timestamps array indicates when that image was displayed.
@@ -464,10 +571,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Index of the frame in the referenced ImageSeries.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IndexSeriesData = Field(
+ ..., description="""Index of the frame in the referenced ImageSeries."""
)
indexed_timeseries: Union[ImageSeries, str] = Field(
...,
@@ -515,13 +620,50 @@ class IndexSeries(TimeSeries):
)
+class IndexSeriesData(ConfiguredBaseModel):
+ """
+ Index of the frame in the referenced ImageSeries.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+        description="""Smallest meaningful difference between values in data, stored in the unit specified, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
+ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
+OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
+IndexSeriesData.model_rebuild()
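
A note on the pattern above, which recurs throughout this diff: inline `data: NDArray[...]` slots are replaced by dedicated `*Data` classes (`ImageSeriesData`, `OpticalSeriesData`, `IndexSeriesData`) whose array payload moves to a `value` slot, and the new `cast_with_value` wrap validator keeps pre-refactor call sites working by retrying a failed field validation with the raw input wrapped as `{"value": v}`. A minimal sketch of that rescue path, using hypothetical `ToyData`/`ToySeries` models rather than the generated ones:

    from typing import Any, Optional

    import numpy as np
    from pydantic import BaseModel, ConfigDict, field_validator


    class Base(BaseModel):
        model_config = ConfigDict(arbitrary_types_allowed=True)

        @field_validator("*", mode="wrap")
        @classmethod
        def cast_with_value(cls, v: Any, handler, info) -> Any:
            # First try the field's normal validation; on failure, retry with
            # the raw input packed into the target model's ``value`` slot.
            try:
                return handler(v)
            except Exception as e1:
                try:
                    return handler({"value": v})
                except Exception:
                    raise e1


    class ToyData(Base):
        unit: str = "meters"
        value: Optional[Any] = None


    class ToySeries(Base):
        data: ToyData


    # A bare array no longer matches ``data: ToyData``, but the validator
    # rescues it as ToyData(value=...), so old call sites keep working:
    series = ToySeries(data=np.arange(3))
    assert isinstance(series.data, ToyData)
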
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py
index ac3b366..1fc5516 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_3_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -213,6 +251,20 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+        description="""Smallest meaningful difference between values in data, stored in the unit specified, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@@ -220,8 +272,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -236,10 +288,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], str] = Field(
- ...,
- description="""Annotations made during an experiment.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: AnnotationSeriesData = Field(
+ ..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@@ -278,6 +328,43 @@ class AnnotationSeries(TimeSeries):
)
+class AnnotationSeriesData(ConfiguredBaseModel):
+ """
+ Annotations made during an experiment.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], str]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
@@ -288,10 +375,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Use values >0 if interval started, <0 if interval ended.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IntervalSeriesData = Field(
+ ..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@@ -330,6 +415,43 @@ class IntervalSeries(TimeSeries):
)
+class IntervalSeriesData(ConfiguredBaseModel):
+ """
+ Use values >0 if interval started, <0 if interval ended.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -417,24 +539,40 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+        description="""Smallest meaningful difference between values in data, stored in the unit specified, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
- None,
- json_schema_extra={
- "linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_times"},
- {"alias": "num_channels"},
- {"alias": "num_bands"},
- ]
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
+ Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_times"},
+ {"alias": "num_channels"},
+ {"alias": "num_bands"},
+ ]
+ }
}
- }
- },
+ },
+ )
)
@@ -504,9 +642,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
- spike_times_index: Optional[Named[VectorIndex]] = Field(
+ electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
- description="""Index into the spike_times dataset.""",
+ description="""Electrode group that each spike unit came from.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
+ electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ None,
+ description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -516,12 +663,9 @@ class Units(DynamicTable):
}
},
)
- spike_times: Optional[UnitsSpikeTimes] = Field(
- None, description="""Spike times for each unit."""
- )
- obs_intervals_index: Optional[Named[VectorIndex]] = Field(
+ electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into the obs_intervals dataset.""",
+ description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -547,9 +691,9 @@ class Units(DynamicTable):
},
)
)
- electrodes_index: Optional[Named[VectorIndex]] = Field(
+ obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into electrodes.""",
+ description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -559,9 +703,12 @@ class Units(DynamicTable):
}
},
)
- electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ spike_times: Optional[UnitsSpikeTimes] = Field(
+ None, description="""Spike times for each unit."""
+ )
+ spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
+ description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -571,41 +718,15 @@ class Units(DynamicTable):
}
},
)
- electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
- None,
- description="""Electrode group that each spike unit came from.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
+ waveform_mean: Optional[UnitsWaveformMean] = Field(
+ None, description="""Spike waveform mean for each spike unit."""
)
- waveform_mean: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
- Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
- )
+ waveform_sd: Optional[UnitsWaveformSd] = Field(
+ None, description="""Spike waveform standard deviation for each spike unit."""
+ )
+ waveforms: Optional[UnitsWaveforms] = Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -671,14 +792,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
+class UnitsWaveformMean(VectorData):
+ """
+ Spike waveform mean for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_mean"] = Field(
+ "waveform_mean",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveformSd(VectorData):
+ """
+ Spike waveform standard deviation for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_sd"] = Field(
+ "waveform_sd",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveforms(VectorData):
+ """
+ Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveforms"] = Field(
+ "waveforms",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
+AnnotationSeriesData.model_rebuild()
IntervalSeries.model_rebuild()
+IntervalSeriesData.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesBands.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimes.model_rebuild()
+UnitsWaveformMean.model_rebuild()
+UnitsWaveformSd.model_rebuild()
+UnitsWaveforms.model_rebuild()
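
With `extra="forbid"` replacing `extra="allow"`, unknown kwargs would normally be rejected outright; the new `gather_extra_to_value` model validator instead repacks them into the `value` slot on models that have one — the "stuff things into `value` dicts" behavior that the todo entry on parameterizing extra fields refers to. A minimal sketch of the mechanism on a hypothetical `ToyContainer`, assuming (as the generated code does) that any pre-existing `value` entry is dict-like:

    from typing import Any, Dict, Optional

    from pydantic import BaseModel, ConfigDict, model_validator


    class ToyContainer(BaseModel):
        model_config = ConfigDict(extra="forbid")

        name: str = "container"
        value: Optional[Dict[str, Any]] = None

        @model_validator(mode="before")
        @classmethod
        def gather_extra_to_value(cls, v: Any) -> Any:
            # Move kwargs that are not declared fields into ``value`` so
            # that named children can still be passed as keyword arguments.
            if isinstance(v, dict):
                extras = {k: val for k, val in v.items() if k not in cls.model_fields}
                if extras:
                    for k in extras:
                        del v[k]
                    if "value" in v:
                        v["value"].update(extras)
                    else:
                        v["value"] = extras
            return v


    c = ToyContainer(my_series=[1, 2, 3])
    assert c.value == {"my_series": [1, 2, 3]}
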
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py
index bf95c5c..95c003f 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -121,10 +158,8 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], float] = Field(
- ...,
- description="""Applied power for optogenetic stimulus, in watts.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: OptogeneticSeriesData = Field(
+ ..., description="""Applied power for optogenetic stimulus, in watts."""
)
site: Union[OptogeneticStimulusSite, str] = Field(
...,
@@ -172,6 +207,41 @@ class OptogeneticSeries(TimeSeries):
)
+class OptogeneticSeriesData(ConfiguredBaseModel):
+ """
+ Applied power for optogenetic stimulus, in watts.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+        description="""Smallest meaningful difference between values in data, stored in the unit specified, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["watts"] = Field(
+ "watts",
+ description="""Unit of measurement for data, which is fixed to 'watts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@@ -202,4 +272,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
+OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
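
Two smaller changes in this file are worth flagging: `format` now materializes the schema default `"raw"` (previously `None`, with the default only implied by the description), and `OptogeneticSeriesData.unit` is a fixed `Literal["watts"]` carrying `equals_string` metadata. Because the generated config sets `validate_default=True`, these defaults are actually run through validation. A minimal sketch with a hypothetical `ToyOgenData` model:

    from typing import Literal, Optional

    from pydantic import BaseModel, ConfigDict, Field


    class ToyOgenData(BaseModel):
        # validate_default=True means the materialized defaults below are
        # checked at instantiation rather than silently serialized.
        model_config = ConfigDict(validate_default=True)

        format: Optional[str] = Field(
            "raw", json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}
        )
        unit: Literal["watts"] = Field(
            "watts",
            json_schema_extra={
                "linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}
            },
        )


    d = ToyOgenData()
    assert d.format == "raw" and d.unit == "watts"
    # The fixed unit rejects anything else:
    # ToyOgenData(unit="volts")  -> ValidationError
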
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py
index 670269a..a87f028 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_3_0.core_nwb_base import (
@@ -27,7 +28,7 @@ from ...core.v2_3_0.core_nwb_base import (
TimeSeriesSync,
)
from ...core.v2_3_0.core_nwb_device import Device
-from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
+from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -179,12 +217,9 @@ class TwoPhotonSeries(ImageSeries):
}
},
)
- data: Optional[
- Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, * z"], float],
- ]
- ] = Field(None, description="""Binary data representing images across frames.""")
+ data: Optional[ImageSeriesData] = Field(
+ None, description="""Binary data representing images across frames."""
+ )
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
@@ -195,8 +230,9 @@ class TwoPhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -254,9 +290,7 @@ class RoiResponseSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(..., description="""Signals from ROIs.""")
+ data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
@@ -306,6 +340,43 @@ class RoiResponseSeries(TimeSeries):
)
+class RoiResponseSeriesData(ConfiguredBaseModel):
+ """
+ Signals from ROIs.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+        description="""Smallest meaningful difference between values in data, stored in the unit specified, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class DfOverF(NWBDataInterface):
"""
dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
@@ -315,10 +386,10 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}})
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class Fluorescence(NWBDataInterface):
@@ -330,10 +401,12 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}}
+ )
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class ImageSegmentation(NWBDataInterface):
@@ -345,10 +418,13 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "ImageSegmentation",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}},
+ )
value: Optional[Dict[str, PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
- name: str = Field(...)
class PlaneSegmentation(DynamicTable):
@@ -372,6 +448,10 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
+ pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ None,
+ description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ )
pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into pixel_mask.""",
@@ -384,9 +464,9 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
None,
- description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -400,10 +480,6 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
- None,
- description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
- )
reference_images: Optional[Dict[str, ImageSeries]] = Field(
None,
description="""Image stacks that the segmentation masks apply to.""",
@@ -664,10 +740,13 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "MotionCorrection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}},
+ )
value: Optional[Dict[str, CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
- name: str = Field(...)
class CorrectedImageStack(NWBDataInterface):
@@ -702,6 +781,7 @@ class CorrectedImageStack(NWBDataInterface):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TwoPhotonSeries.model_rebuild()
RoiResponseSeries.model_rebuild()
+RoiResponseSeriesData.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
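
The `coerce_subclass` change repeated in each module fixes a silent no-op rather than a crash: under `extra="forbid"`, `__pydantic_extra__` is `None`, so the old `{**v.__dict__, **v.__pydantic_extra__}` spread raised `TypeError`, which the surrounding `except TypeError: pass` swallowed — and the upcast never happened. A short demonstration of the guarded path, with hypothetical `Parent`/`Child` models:

    from pydantic import BaseModel, ConfigDict


    class Parent(BaseModel):
        model_config = ConfigDict(extra="forbid")
        name: str


    class Child(Parent):
        pass


    p = Parent(name="roi_0")
    # extra="forbid" leaves __pydantic_extra__ as None, so spreading it
    # would raise TypeError; the guard keeps the upcast working here too.
    if p.__pydantic_extra__:
        c = Child(**{**p.__dict__, **p.__pydantic_extra__})
    else:
        c = Child(**p.__dict__)
    assert isinstance(c, Child) and c.name == "roi_0"
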
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
index 5c78658..1ad221c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
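
`__getitem__` gaining `str` keys complements the two validators above: once extra kwargs are packed into a dict-valued `value` slot, string indexing is the natural way to get them back out. A minimal sketch of the accessor on a hypothetical model:

    from typing import Any, Optional, Union

    from pydantic import BaseModel


    class ToyModel(BaseModel):
        value: Optional[Any] = None
        data: Optional[Any] = None

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            # Prefer ``value`` when populated, fall back to ``data``.
            if self.value is not None:
                return self.value[val]
            if self.data is not None:
                return self.data[val]
            raise KeyError(val)


    m = ToyModel(value={"my_series": [1, 2, 3]})
    assert m["my_series"] == [1, 2, 3]
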
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py
index 2125d57..b43007a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
Image,
@@ -38,6 +38,7 @@ from ...core.v2_3_0.core_nwb_ecephys import (
ClusterWaveforms,
Clustering,
ElectricalSeries,
+ ElectricalSeriesData,
ElectrodeGroup,
ElectrodeGroupPosition,
EventDetection,
@@ -46,6 +47,7 @@ from ...core.v2_3_0.core_nwb_ecephys import (
FilteredEphys,
LFP,
SpikeEventSeries,
+ SpikeEventSeriesData,
)
from ...core.v2_3_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries
from ...core.v2_3_0.core_nwb_file import (
@@ -87,9 +89,12 @@ from ...core.v2_3_0.core_nwb_image import (
GrayscaleImage,
ImageMaskSeries,
ImageSeries,
+ ImageSeriesData,
ImageSeriesExternalFile,
IndexSeries,
+ IndexSeriesData,
OpticalSeries,
+ OpticalSeriesData,
RGBAImage,
RGBImage,
)
@@ -97,14 +102,23 @@ from ...core.v2_3_0.core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
+ AnnotationSeriesData,
DecompositionSeries,
DecompositionSeriesBands,
DecompositionSeriesData,
IntervalSeries,
+ IntervalSeriesData,
Units,
UnitsSpikeTimes,
+ UnitsWaveformMean,
+ UnitsWaveformSd,
+ UnitsWaveforms,
+)
+from ...core.v2_3_0.core_nwb_ogen import (
+ OptogeneticSeries,
+ OptogeneticSeriesData,
+ OptogeneticStimulusSite,
)
-from ...core.v2_3_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from ...core.v2_3_0.core_nwb_ophys import (
CorrectedImageStack,
DfOverF,
@@ -120,6 +134,7 @@ from ...core.v2_3_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
RoiResponseSeries,
+ RoiResponseSeriesData,
TwoPhotonSeries,
)
from ...core.v2_3_0.core_nwb_retinotopy import (
@@ -161,7 +176,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -171,7 +186,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -182,7 +197,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -195,6 +210,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -205,12 +232,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
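
The new `cast_with_value` wrap validator complements the existing `coerce_value` rescue: where `coerce_value` pulls a usable value *out of* an over-nested model, `cast_with_value` goes the other way and retries validation with the raw input wrapped as `{"value": v}`. A hedged sketch with invented `Data`/`Series` models:

    from typing import Any, List, Optional
    from pydantic import BaseModel, field_validator

    class Data(BaseModel):
        unit: str = "volts"
        value: Optional[List[float]] = None

    class Series(BaseModel):
        data: Data

        @field_validator("*", mode="wrap")
        @classmethod
        def cast_with_value(cls, v: Any, handler, info) -> Any:
            """Try to rescue instantiation by casting into the model's value field"""
            try:
                return handler(v)
            except Exception as e1:
                try:
                    return handler({"value": v})
                except Exception:
                    raise e1

    s = Series(data=[1.0, 2.0, 3.0])  # a bare array is recast as Data(value=[...])
    assert s.data.value == [1.0, 2.0, 3.0]
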
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py
index f8b6d99..c823f7f 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py
@@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +92,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -109,7 +146,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -333,9 +370,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
- NDArray[Shape["* x, * y"], float],
- NDArray[Shape["* x, * y, 3 r_g_b"], float],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
+ NDArray[Shape["* x, * y"], float | int],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@@ -500,13 +537,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of this collection of processed data."""
+ )
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
- name: str = Field(...)
class Images(NWBDataInterface):
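
Field order aside, the substantive change to `ProcessingModule` above is that `description` is now generated as an explicit required field rather than arriving as an untyped extra kwarg. Roughly, as a trimmed sketch rather than the full generated class:

    from typing import Any, Dict, Optional
    from pydantic import BaseModel, Field

    class ProcessingModuleSketch(BaseModel):
        name: str = Field(...)
        description: str = Field(...)  # newly explicit and required
        value: Optional[Dict[str, Any]] = None

    pm = ProcessingModuleSketch(name="behavior", description="processed behavior data")
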
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py
index 7c0abb8..4bb3545 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -176,6 +213,20 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@@ -183,8 +234,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -198,10 +249,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEpochs",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
+ )
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
- name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@@ -213,10 +267,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEvents",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@@ -228,10 +285,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralTimeSeries",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class PupilTracking(NWBDataInterface):
@@ -243,10 +303,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class EyeTracking(NWBDataInterface):
@@ -258,10 +320,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class CompassDirection(NWBDataInterface):
@@ -273,10 +337,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "CompassDirection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class Position(NWBDataInterface):
@@ -288,10 +355,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
# Model rebuild
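
The same pattern repeats for every fixed-name interface in this file: `name` moves from a bare required field to one with a schema-supplied default, so bare construction now works. Trimmed to just the changed field:

    from pydantic import BaseModel, Field

    class PupilTrackingSketch(BaseModel):
        name: str = Field(
            "PupilTracking",
            json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}},
        )

    assert PupilTrackingSketch().name == "PupilTracking"  # no longer required
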
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py
index 436d2d4..60fafeb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
index ac26b29..607a1fc 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_4_0.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
- data: Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_channels"], float],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Recorded voltage data.""")
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
+ data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -215,6 +249,45 @@ class ElectricalSeries(TimeSeries):
)
+class ElectricalSeriesData(ConfiguredBaseModel):
+ """
+ Recorded voltage data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and 'channel_conversion' (if present).""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_channels"], float | int],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
@@ -225,10 +298,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_events, * num_samples"], float],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Spike waveforms.""")
+ data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@@ -238,6 +308,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -250,11 +325,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -287,6 +357,44 @@ class SpikeEventSeries(ElectricalSeries):
)
+class SpikeEventSeriesData(ConfiguredBaseModel):
+ """
+ Spike waveforms.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_events, * num_samples"], float | int],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@@ -385,10 +493,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
+ )
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
- name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@@ -400,10 +510,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
+ )
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class LFP(NWBDataInterface):
@@ -415,10 +527,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class ElectrodeGroup(NWBContainer):
@@ -561,7 +673,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
+ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
+SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
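
Two ecephys changes compound here: `data` becomes a proper `ElectricalSeriesData` container class (hence the extra `model_rebuild()` calls above), and array dtypes widen from `float` to `float | int`, so native int16 acquisition values validate without a cast. A usage sketch with a locally defined stand-in, since instantiating the real `ElectricalSeries` also needs electrodes and timing fields:

    from typing import Optional, Union
    import numpy as np
    from numpydantic import NDArray, Shape
    from pydantic import BaseModel

    class DataSketch(BaseModel):
        conversion: Optional[float] = 1.0
        # float | int mirrors the widened dtypes introduced in this diff
        value: Optional[
            Union[
                NDArray[Shape["* num_times"], float | int],
                NDArray[Shape["* num_times, * num_channels"], float | int],
            ]
        ] = None

    d = DataSketch(
        value=np.zeros((100, 8), dtype=np.int16),  # raw ADC counts, no float cast
        conversion=9.5367e-9,  # example factor from the field description above
    )
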
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py
index 25894a3..009e0cb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_4_0.core_nwb_base import TimeSeries
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
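
The `coerce_subclass` tweak repeated in each module guards against a real footgun of the `extra="forbid"` switch: on forbidding models `__pydantic_extra__` is `None`, and splatting `**None` raises `TypeError` before the upcast ever runs. A self-contained sketch of the failure mode and the guard:

    from pydantic import BaseModel, ConfigDict

    class Base(BaseModel):
        model_config = ConfigDict(extra="forbid")
        x: int = 0

    class Sub(Base):
        y: int = 0

    b = Base(x=1)
    assert b.__pydantic_extra__ is None  # forbid -> no extras dict at all
    extras = b.__pydantic_extra__
    kwargs = {**b.__dict__, **extras} if extras else dict(b.__dict__)
    sub = Sub(**kwargs)  # upcast succeeds without tripping over None
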
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py
index 84d5b9a..6f0fcea 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import (
NWBContainer,
@@ -33,7 +33,12 @@ from ...core.v2_4_0.core_nwb_icephys import (
from ...core.v2_4_0.core_nwb_misc import Units
from ...core.v2_4_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_4_0.core_nwb_ophys import ImagingPlane
-from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
+from ...hdmf_common.v1_5_0.hdmf_common_table import (
+ DynamicTable,
+ ElementIdentifiers,
+ VectorData,
+ VectorIndex,
+)
metamodel_version = "None"
@@ -44,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -54,7 +59,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -65,7 +70,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -78,6 +83,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -88,12 +105,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -230,6 +272,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
+ specifications: Optional[dict] = Field(
+ None, description="""Nested dictionary of schema specifications"""
+ )
class NWBFileStimulus(ConfiguredBaseModel):
@@ -328,10 +373,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
- lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
- None,
- description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
- )
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@@ -357,6 +398,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
+ value: Optional[Dict[str, LabMetaData]] = Field(
+ None,
+ description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
+ )
class GeneralSourceScript(ConfiguredBaseModel):
@@ -392,12 +437,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
- electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
- None, description="""Physical group of electrodes."""
- )
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
+ value: Optional[Dict[str, ElectrodeGroup]] = Field(
+ None, description="""Physical group of electrodes."""
+ )
class ExtracellularEphysElectrodes(DynamicTable):
@@ -553,9 +598,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
- intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
- None, description="""An intracellular electrode."""
- )
sweep_table: Optional[SweepTable] = Field(
None,
description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""",
@@ -580,6 +622,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""",
)
+ value: Optional[Dict[str, IntracellularElectrode]] = Field(
+ None, description="""An intracellular electrode."""
+ )
class NWBFileIntervals(ConfiguredBaseModel):
@@ -605,7 +650,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
- time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
+ value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)
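
The `lab_meta_data`, `electrode_group`, `intracellular_electrode`, and `time_intervals` dict fields are all renamed to the generic `value` slot. That rename is what makes the base-class machinery uniform: `gather_extra_to_value` has somewhere to pack extras, and the widened `__getitem__(self, val: Union[int, slice, str])` can resolve children by name on any container. A hedged sketch (field subset invented):

    from typing import Any, Dict, Optional, Union
    from pydantic import BaseModel

    class IntervalsSketch(BaseModel):
        epochs: Optional[str] = None
        value: Optional[Dict[str, Any]] = None  # was: time_intervals

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            if self.value is not None:
                return self.value[val]
            raise KeyError(val)

    iv = IntervalsSketch(value={"running_bouts": "a TimeIntervals-like object"})
    assert iv["running_bouts"] == "a TimeIntervals-like object"
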
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py
index d4ebcb3..d8b96b9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_4_0.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -227,11 +265,25 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- value: Optional[NDArray[Shape["* num_times"], float]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -246,12 +298,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
+ data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@@ -319,12 +371,28 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IZeroClampSeries(CurrentClampSeries):
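
Alongside the new metadata fields, the icephys `*Data` classes drop `value: Any` in favor of a typed, shaped array, so malformed data now fails at validation time rather than downstream. A quick sketch of the stricter behavior (class name invented):

    from typing import Optional
    import numpy as np
    from numpydantic import NDArray, Shape
    from pydantic import BaseModel, ValidationError

    class ClampDataSketch(BaseModel):
        value: Optional[NDArray[Shape["* num_times"], float | int]] = None

    ClampDataSketch(value=np.arange(5))  # 1-D trace: accepted
    try:
        ClampDataSketch(value=np.zeros((2, 2)))  # wrong shape: rejected
    except ValidationError:
        pass
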
@@ -479,6 +547,20 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@@ -486,7 +568,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class VoltageClampSeries(PatchClampSeries):
@@ -499,13 +583,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
+ data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@@ -577,27 +661,6 @@ class VoltageClampSeries(PatchClampSeries):
)
-class VoltageClampSeriesData(ConfiguredBaseModel):
- """
- Recorded current.
- """
-
- linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
-
- name: Literal["data"] = Field(
- "data",
- json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
- )
- unit: Literal["amperes"] = Field(
- "amperes",
- description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
- json_schema_extra={
- "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
- },
- )
- value: Any = Field(...)
-
-
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@@ -650,6 +713,43 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
+class VoltageClampSeriesData(ConfiguredBaseModel):
+ """
+ Recorded current.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["amperes"] = Field(
+ "amperes",
+ description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@@ -854,12 +954,28 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IntracellularElectrode(NWBContainer):
@@ -909,15 +1025,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: VectorData[NDArray[Any, int]] = Field(
- ...,
- description="""Sweep number of the PatchClampSeries in that row.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
- )
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@@ -939,6 +1046,15 @@ class SweepTable(DynamicTable):
}
},
)
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
+ ...,
+ description="""Sweep number of the PatchClampSeries in that row.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@@ -1120,11 +1236,15 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
)
+ responses: IntracellularResponsesTable = Field(
+ ..., description="""Table for storing intracellular response related metadata."""
+ )
stimuli: IntracellularStimuliTable = Field(
..., description="""Table for storing intracellular stimulus related metadata."""
)
- responses: IntracellularResponsesTable = Field(
- ..., description="""Table for storing intracellular response related metadata."""
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
)
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
@@ -1465,9 +1585,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
-VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
+VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
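
Taken together, the icephys hunks above swap the untyped `value: Any` slots for shape- and dtype-checked arrays and pull the calibration attributes (`continuity`, `conversion`, `resolution`) onto each `*Data` class. A minimal sketch of what that looks like at runtime; the icephys import path is assumed by analogy with the image/misc/ogen paths shown elsewhere in this diff:

```python
import numpy as np

# Import path assumed by analogy with the image/misc/ogen modules in this diff:
from nwb_models.models.pydantic.core.v2_4_0.core_nwb_icephys import (
    CurrentClampStimulusSeriesData,
)

# 'value' is a typed 1-D array (float or int) now, not Any; everything else
# falls back to its ifabsent default.
data = CurrentClampStimulusSeriesData(value=np.arange(5, dtype=float))
assert data.unit == "amperes"                  # pinned by the Literal default
assert data.conversion == 1.0 and data.resolution == -1.0
print(data[1:3])                               # __getitem__ forwards to .value
```
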
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py
index 8fd3288..d4f8f0e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
from ...core.v2_4_0.core_nwb_device import Device
@@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
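
Widening `__getitem__` to accept `str` is small but load-bearing: once unknown kwargs get packed into a `value` dict (see `gather_extra_to_value` below), string keys have to type-check. A toy stand-in, not the generated class itself:

```python
from typing import Any, Union

# Toy reimplementation (hypothetical class, not from this diff):
class Indexed:
    def __init__(self, value: Any = None, data: Any = None):
        self.value, self.data = value, data

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        """Try to get a value from value or "data" if we have it"""
        if self.value is not None:
            return self.value[val]
        if self.data is not None:
            return self.data[val]
        raise KeyError(val)

# str keys matter once extra kwargs are packed into a `value` dict:
assert Indexed(value={"lick_times": [0.1, 0.2]})["lick_times"] == [0.1, 0.2]
```
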
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
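
The new `cast_with_value` wrap validator gives every field a second chance: if the raw input fails validation, it is retried as `{"value": input}`. A self-contained sketch of the mechanism; the `Data`/`Series` models here are hypothetical stand-ins for the generated classes:

```python
from typing import Any, Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, field_validator


class Data(BaseModel):
    """Hypothetical stand-in for a generated *Data class."""

    unit: str = "volts"
    value: Optional[NDArray[Shape["* t"], float]] = None


class Series(BaseModel):
    """Hypothetical parent model with a typed data slot."""

    data: Data

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        """Try to rescue instantiation by casting into the model's value field"""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1


# A bare array where a Data model is expected gets packed into Data.value:
s = Series(data=np.linspace(0.0, 1.0, 4))
assert s.data.unit == "volts"
assert s.data.value.shape == (4,)
```
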
@@ -67,12 +79,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
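
And `gather_extra_to_value` is the other half of the `extra="forbid"` switch: rather than rejecting unknown kwargs, models that have a `value` slot absorb them into it. A runnable sketch with a hypothetical `Container` model:

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class Container(BaseModel):
    """Hypothetical model with a value slot and forbidden extras."""

    model_config = ConfigDict(extra="forbid")

    name: str
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        """Pack unknown kwargs into ``value`` instead of rejecting them"""
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" in v:
                    v["value"].update(extras)
                else:
                    v["value"] = extras
        return v


c = Container(name="behavior", lick_times=[0.1, 0.2], running_speed=[3.0])
assert set(c.value) == {"lick_times", "running_speed"}
```
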
@@ -116,7 +153,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@@ -138,7 +175,7 @@ class RGBImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -168,7 +205,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -198,9 +235,7 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -214,8 +249,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -263,6 +299,43 @@ class ImageSeries(TimeSeries):
)
+class ImageSeriesData(ConfiguredBaseModel):
+ """
+ Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, * z"], float | int],
+ ]
+ ] = Field(None)
+
+
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
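
With the `data` slot refactored from bare array unions into `ImageSeriesData` above, the pixels travel together with their calibration metadata. A usage sketch; the `unit` string and array sizes are made up:

```python
import numpy as np
from nwb_models.models.pydantic.core.v2_4_0.core_nwb_image import ImageSeriesData

frames = ImageSeriesData(
    unit="lumens",                  # required; the unit string is arbitrary here
    conversion=2.5 / 32768 / 8000,  # the int16 example from the docstring
    value=np.zeros((10, 64, 64), dtype=np.int16),  # (frame, x, y)
)
assert frames.name == "data"        # pinned by the Literal default
assert frames.value.shape == (10, 64, 64)
```
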
@@ -304,9 +377,7 @@ class ImageMaskSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -320,8 +391,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -379,6 +451,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
+ data: OpticalSeriesData = Field(
+ ..., description="""Images presented to subject, either grayscale or RGB"""
+ )
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@@ -387,10 +462,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
- ] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@@ -405,8 +476,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -454,6 +526,43 @@ class OpticalSeries(ImageSeries):
)
+class OpticalSeriesData(ConfiguredBaseModel):
+ """
+ Images presented to subject, either grayscale or RGB
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
+ ]
+ ] = Field(None)
+
+
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored somewhere, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced ImageSeries, and the timestamps array indicates when that image was displayed.
@@ -464,10 +573,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Index of the frame in the referenced ImageSeries.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IndexSeriesData = Field(
+ ..., description="""Index of the frame in the referenced ImageSeries."""
)
indexed_timeseries: Union[ImageSeries, str] = Field(
...,
@@ -515,13 +622,50 @@ class IndexSeries(TimeSeries):
)
+class IndexSeriesData(ConfiguredBaseModel):
+ """
+ Index of the frame in the referenced ImageSeries.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
+ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
+OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
+IndexSeriesData.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py
index 3ab6b75..d406c06 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_4_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -213,6 +251,20 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@@ -220,8 +272,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -236,10 +288,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], str] = Field(
- ...,
- description="""Annotations made during an experiment.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: AnnotationSeriesData = Field(
+ ..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@@ -278,6 +328,43 @@ class AnnotationSeries(TimeSeries):
)
+class AnnotationSeriesData(ConfiguredBaseModel):
+ """
+ Annotations made during an experiment.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], str]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (e.g., 1 for feature A, 2 for feature B, 3 for feature C, etc.). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but one that is identifiable as representing time intervals in a machine-readable way.
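
The `AnnotationSeriesData` class added above pins the unitless defaults (`unit="n/a"`, and `resolution` clamped to exactly -1.0 via `le`/`ge`) rather than leaving them as free-form values. A quick sketch, assuming the package imports as laid out in this diff:

```python
import numpy as np
from nwb_models.models.pydantic.core.v2_4_0.core_nwb_misc import AnnotationSeriesData

notes = AnnotationSeriesData(value=np.array(["lever press", "reward"]))
assert notes.unit == "n/a"       # annotations are unitless
assert notes.resolution == -1.0  # le=-1 and ge=-1 pin it to exactly -1.0
```
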
@@ -288,10 +375,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Use values >0 if interval started, <0 if interval ended.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IntervalSeriesData = Field(
+ ..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@@ -330,6 +415,43 @@ class IntervalSeries(TimeSeries):
)
+class IntervalSeriesData(ConfiguredBaseModel):
+ """
+ Use values >0 if interval started, <0 if interval ended.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -417,24 +539,40 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
- None,
- json_schema_extra={
- "linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_times"},
- {"alias": "num_channels"},
- {"alias": "num_bands"},
- ]
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
+ Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_times"},
+ {"alias": "num_channels"},
+ {"alias": "num_bands"},
+ ]
+ }
}
- }
- },
+ },
+ )
)
@@ -504,9 +642,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
- spike_times_index: Optional[Named[VectorIndex]] = Field(
+ electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
- description="""Index into the spike_times dataset.""",
+ description="""Electrode group that each spike unit came from.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
+ electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ None,
+ description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -516,12 +663,9 @@ class Units(DynamicTable):
}
},
)
- spike_times: Optional[UnitsSpikeTimes] = Field(
- None, description="""Spike times for each unit."""
- )
- obs_intervals_index: Optional[Named[VectorIndex]] = Field(
+ electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into the obs_intervals dataset.""",
+ description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -547,9 +691,9 @@ class Units(DynamicTable):
},
)
)
- electrodes_index: Optional[Named[VectorIndex]] = Field(
+ obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into electrodes.""",
+ description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -559,9 +703,12 @@ class Units(DynamicTable):
}
},
)
- electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ spike_times: Optional[UnitsSpikeTimes] = Field(
+ None, description="""Spike times for each unit."""
+ )
+ spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
+ description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -571,41 +718,15 @@ class Units(DynamicTable):
}
},
)
- electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
- None,
- description="""Electrode group that each spike unit came from.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
+ waveform_mean: Optional[UnitsWaveformMean] = Field(
+ None, description="""Spike waveform mean for each spike unit."""
)
- waveform_mean: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
- Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
- )
+ waveform_sd: Optional[UnitsWaveformSd] = Field(
+ None, description="""Spike waveform standard deviation for each spike unit."""
+ )
+ waveforms: Optional[UnitsWaveforms] = Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -671,14 +792,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
+class UnitsWaveformMean(VectorData):
+ """
+ Spike waveform mean for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_mean"] = Field(
+ "waveform_mean",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveformSd(VectorData):
+ """
+ Spike waveform standard deviation for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_sd"] = Field(
+ "waveform_sd",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveforms(VectorData):
+ """
+ Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveforms"] = Field(
+ "waveforms",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
+AnnotationSeriesData.model_rebuild()
IntervalSeries.model_rebuild()
+IntervalSeriesData.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesBands.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimes.model_rebuild()
+UnitsWaveformMean.model_rebuild()
+UnitsWaveformSd.model_rebuild()
+UnitsWaveforms.model_rebuild()
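
The waveform columns on `Units` likewise graduate from inline `VectorData[...]` unions to named `VectorData` subclasses that pin their NWB names and carry `sampling_rate`/`unit` alongside the array. A usage sketch with made-up column sizes, assuming the `VectorData` base adds no further required fields:

```python
import numpy as np
from nwb_models.models.pydantic.core.v2_4_0.core_nwb_misc import UnitsWaveformMean

wm = UnitsWaveformMean(
    description="mean waveform per unit",
    sampling_rate=30000.0,
    value=np.zeros((12, 82)),        # num_units x num_samples (made up)
)
assert wm.name == "waveform_mean"    # pinned by the Literal default
assert wm.unit == "volts"            # fixed attribute carried with the column
```
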
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py
index 350a398..58102f2 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
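
Flipping `extra` from "allow" to "forbid" is also why `__pydantic_extra__` can now be `None` (hence the new guard in `coerce_subclass`), and why unknown kwargs must be rerouted rather than rejected: `gather_extra_to_value` packs them into the `value` dict. A toy sketch of the same validator, assuming `value` holds a dict when present:

```python
from typing import Any, Dict, Optional
from pydantic import BaseModel, ConfigDict, model_validator

class Container(BaseModel):
    # Toy stand-in for a generated NWB container: extras are forbidden,
    # but the `value` slot catches anything that isn't a declared field.
    model_config = ConfigDict(extra="forbid")
    name: str = "Container"
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {k: val for k, val in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                # assumes any pre-existing `value` is a dict
                v.setdefault("value", {}).update(extras)
        return v

c = Container(series_a=1, series_b=2)
assert c.value == {"series_a": 1, "series_b": 2}
```
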
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -121,10 +158,8 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], float] = Field(
- ...,
- description="""Applied power for optogenetic stimulus, in watts.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: OptogeneticSeriesData = Field(
+ ..., description="""Applied power for optogenetic stimulus, in watts."""
)
site: Union[OptogeneticStimulusSite, str] = Field(
...,
@@ -172,6 +207,41 @@ class OptogeneticSeries(TimeSeries):
)
+class OptogeneticSeriesData(ConfiguredBaseModel):
+ """
+ Applied power for optogenetic stimulus, in watts.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["watts"] = Field(
+ "watts",
+ description="""Unit of measurement for data, which is fixed to 'watts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
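
Note that the new `value` slots also widen the array dtype from `float` to `float | int`, so integer-typed acquisitions validate without an up-front cast. A small sketch of just the dtype change, assuming a numpydantic version that accepts dtype unions as used throughout this diff:

```python
import numpy as np
from typing import Optional
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class DataToy(BaseModel):
    # Mirrors the widened value slot: a 1-D array of float or int.
    value: Optional[NDArray[Shape["* num_times"], float | int]] = None

DataToy(value=np.zeros(8, dtype=np.int32))    # previously a dtype error
DataToy(value=np.zeros(8, dtype=np.float64))  # still fine
```
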
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@@ -202,4 +272,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
+OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py
index 9f6e191..07b0738 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_4_0.core_nwb_base import (
@@ -27,7 +28,7 @@ from ...core.v2_4_0.core_nwb_base import (
TimeSeriesSync,
)
from ...core.v2_4_0.core_nwb_device import Device
-from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
+from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -179,9 +217,7 @@ class TwoPhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -195,8 +231,9 @@ class TwoPhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -254,9 +291,7 @@ class RoiResponseSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(..., description="""Signals from ROIs.""")
+ data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
@@ -306,6 +341,43 @@ class RoiResponseSeries(TimeSeries):
)
+class RoiResponseSeriesData(ConfiguredBaseModel):
+ """
+ Signals from ROIs.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class DfOverF(NWBDataInterface):
"""
dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
@@ -315,10 +387,10 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}})
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class Fluorescence(NWBDataInterface):
@@ -330,10 +402,12 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}}
+ )
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class ImageSegmentation(NWBDataInterface):
@@ -345,10 +419,13 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "ImageSegmentation",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}},
+ )
value: Optional[Dict[str, PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
- name: str = Field(...)
class PlaneSegmentation(DynamicTable):
@@ -372,6 +449,10 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
+ pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ None,
+ description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ )
pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into pixel_mask.""",
@@ -384,9 +465,9 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
None,
- description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -400,10 +481,6 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
- None,
- description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
- )
reference_images: Optional[Dict[str, ImageSeries]] = Field(
None,
description="""Image stacks that the segmentation masks apply to.""",
@@ -664,10 +741,13 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "MotionCorrection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}},
+ )
value: Optional[Dict[str, CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
- name: str = Field(...)
class CorrectedImageStack(NWBDataInterface):
@@ -702,6 +782,7 @@ class CorrectedImageStack(NWBDataInterface):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TwoPhotonSeries.model_rebuild()
RoiResponseSeries.model_rebuild()
+RoiResponseSeriesData.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
index ffc194e..9943610 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py
index 620dcf2..5006e35 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_4_0.core_nwb_base import (
Image,
@@ -39,6 +39,7 @@ from ...core.v2_4_0.core_nwb_ecephys import (
ClusterWaveforms,
Clustering,
ElectricalSeries,
+ ElectricalSeriesData,
ElectrodeGroup,
ElectrodeGroupPosition,
EventDetection,
@@ -47,6 +48,7 @@ from ...core.v2_4_0.core_nwb_ecephys import (
FilteredEphys,
LFP,
SpikeEventSeries,
+ SpikeEventSeriesData,
)
from ...core.v2_4_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries
from ...core.v2_4_0.core_nwb_file import (
@@ -100,9 +102,12 @@ from ...core.v2_4_0.core_nwb_image import (
GrayscaleImage,
ImageMaskSeries,
ImageSeries,
+ ImageSeriesData,
ImageSeriesExternalFile,
IndexSeries,
+ IndexSeriesData,
OpticalSeries,
+ OpticalSeriesData,
RGBAImage,
RGBImage,
)
@@ -110,14 +115,23 @@ from ...core.v2_4_0.core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
+ AnnotationSeriesData,
DecompositionSeries,
DecompositionSeriesBands,
DecompositionSeriesData,
IntervalSeries,
+ IntervalSeriesData,
Units,
UnitsSpikeTimes,
+ UnitsWaveformMean,
+ UnitsWaveformSd,
+ UnitsWaveforms,
+)
+from ...core.v2_4_0.core_nwb_ogen import (
+ OptogeneticSeries,
+ OptogeneticSeriesData,
+ OptogeneticStimulusSite,
)
-from ...core.v2_4_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from ...core.v2_4_0.core_nwb_ophys import (
CorrectedImageStack,
DfOverF,
@@ -133,6 +147,7 @@ from ...core.v2_4_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
RoiResponseSeries,
+ RoiResponseSeriesData,
TwoPhotonSeries,
)
from ...core.v2_4_0.core_nwb_retinotopy import (
@@ -174,7 +189,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -184,7 +199,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -195,7 +210,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -208,6 +223,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -218,12 +245,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py
index 2db9763..6298cf0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py
@@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -91,12 +103,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -120,7 +157,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -359,9 +396,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
- NDArray[Shape["* x, * y"], float],
- NDArray[Shape["* x, * y, 3 r_g_b"], float],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
+ NDArray[Shape["* x, * y"], float | int],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@@ -551,13 +588,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of this collection of processed data."""
+ )
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
- name: str = Field(...)
class Images(NWBDataInterface):
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py
index 89c1038..8aa7fa4 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -176,6 +213,24 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -183,10 +238,10 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, 1 x"], float],
- NDArray[Shape["* num_times, 2 x_y"], float],
- NDArray[Shape["* num_times, 3 x_y_z"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, 1 x"], float | int],
+ NDArray[Shape["* num_times, 2 x_y"], float | int],
+ NDArray[Shape["* num_times, 3 x_y_z"], float | int],
]
] = Field(None)
@@ -200,10 +255,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEpochs",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
+ )
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
- name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@@ -215,10 +273,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEvents",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@@ -230,10 +291,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralTimeSeries",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class PupilTracking(NWBDataInterface):
@@ -245,10 +309,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class EyeTracking(NWBDataInterface):
@@ -260,10 +326,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class CompassDirection(NWBDataInterface):
@@ -275,10 +343,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "CompassDirection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class Position(NWBDataInterface):
@@ -290,10 +361,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
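
Each of these interface containers now defaults `name` to its type name via `ifabsent`, so they can be constructed without boilerplate. A toy illustration of the defaulting (hypothetical class, not the generated one):

```python
from pydantic import BaseModel, Field

class Position(BaseModel):
    # Toy mirror: the container's name falls back to its type name.
    name: str = Field("Position")

assert Position().name == "Position"        # no longer a required kwarg
assert Position(name="wheel").name == "wheel"
```
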
# Model rebuild
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py
index e4cf279..1dd9b18 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
index 91c2222..4091b82 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_5_0.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
- data: Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_channels"], float],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Recorded voltage data.""")
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
+ data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -215,6 +249,49 @@ class ElectricalSeries(TimeSeries):
)
+class ElectricalSeriesData(ConfiguredBaseModel):
+ """
+ Recorded voltage data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_channels"], float | int],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
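
With this split, per-signal metadata (`unit`, `conversion`, `offset`, ...) travels with the samples on the `*Data` object instead of on the parent series, and the raw array lives at `data.value`. A toy mirror of the resulting shape (hypothetical class, not the generated one):

```python
import numpy as np
from typing import Optional
from pydantic import BaseModel, ConfigDict

class ElectricalSeriesDataToy(BaseModel):
    # Toy mirror of the split: conversion metadata sits beside the samples.
    model_config = ConfigDict(arbitrary_types_allowed=True)
    unit: str = "volts"
    conversion: float = 1.0
    offset: float = 0.0
    value: Optional[np.ndarray] = None

d = ElectricalSeriesDataToy(value=np.arange(4, dtype=np.int16))
volts = d.value * d.conversion + d.offset  # ints allowed, converted on read
```
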
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
@@ -225,10 +302,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_events, * num_samples"], float],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Spike waveforms.""")
+ data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@@ -238,6 +312,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -250,11 +329,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -287,6 +361,48 @@ class SpikeEventSeries(ElectricalSeries):
)
+class SpikeEventSeriesData(ConfiguredBaseModel):
+ """
+ Spike waveforms.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_events, * num_samples"], float | int],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@@ -385,10 +501,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
+ )
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
- name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@@ -400,10 +518,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
+ )
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class LFP(NWBDataInterface):
@@ -415,10 +535,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class ElectrodeGroup(NWBContainer):
@@ -561,7 +681,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
+ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
+SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
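
The hunks above typify the change repeated across the regenerated models: the former bare-array `data` slot gains a typed companion class (here `SpikeEventSeriesData`) that carries the fixed unit plus the calibration attributes (`conversion`, `offset`, `resolution`, `continuity`), and the array dtype widens from `float` to `float | int`. A minimal usage sketch, assuming the generated v2_5_0 models are importable from the `nwb_models` package:

    import numpy as np
    from nwb_models.models.pydantic.core.v2_5_0.core_nwb_ecephys import SpikeEventSeriesData

    # int16 waveforms now validate, since the dtype spec is ``float | int``;
    # 2-D (events, samples) and 3-D (events, channels, samples) both fit.
    waveforms = SpikeEventSeriesData(
        value=np.zeros((10, 32), dtype=np.int16),
        conversion=2.5 / 32768 / 8000,  # raw ADC counts -> volts, per the docstring
    )
    assert waveforms.unit == "volts"  # equals_string: fixed by the schema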
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py
index ab92eb7..512ad37 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_5_0.core_nwb_base import TimeSeriesReferenceVectorData
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
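
Two rescue validators complement the switch from extra="allow" to extra="forbid". `cast_with_value` retries a failing field by re-wrapping the raw input as `{"value": v}`; `gather_extra_to_value` intercepts unknown constructor kwargs and packs them into the `value` slot instead of letting strict validation reject them. A reduced standalone sketch of the extras-packing path (a toy model, not the generated class):

    from typing import Any, Dict, Optional
    from pydantic import BaseModel, ConfigDict, model_validator

    class Container(BaseModel):
        model_config = ConfigDict(extra="forbid")
        name: str = "data"
        value: Optional[Dict[str, Any]] = None

        @model_validator(mode="before")
        @classmethod
        def gather_extra_to_value(cls, v: Any) -> Any:
            # Unknown keys would normally raise under extra="forbid";
            # route them into ``value`` instead, as the diff's validator does.
            if isinstance(v, dict) and "value" in cls.model_fields:
                extras = {k: val for k, val in v.items() if k not in cls.model_fields}
                for k in extras:
                    del v[k]
                if extras:
                    if isinstance(v.get("value"), dict):
                        v["value"].update(extras)
                    else:
                        v["value"] = extras
            return v

    c = Container(my_series={"rate": 30.0})  # not a declared field
    assert c.value == {"my_series": {"rate": 30.0}}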
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py
index 6c056a6..ef8da1f 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import (
Images,
@@ -34,7 +34,12 @@ from ...core.v2_5_0.core_nwb_icephys import (
from ...core.v2_5_0.core_nwb_misc import Units
from ...core.v2_5_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_5_0.core_nwb_ophys import ImagingPlane
-from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
+from ...hdmf_common.v1_5_0.hdmf_common_table import (
+ DynamicTable,
+ ElementIdentifiers,
+ VectorData,
+ VectorIndex,
+)
metamodel_version = "None"
@@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +84,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +106,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -231,6 +273,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
+ specifications: Optional[dict] = Field(
+ None, description="""Nested dictionary of schema specifications"""
+ )
class NWBFileStimulus(ConfiguredBaseModel):
@@ -331,10 +376,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
- lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
- None,
- description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
- )
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@@ -360,6 +401,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
+ value: Optional[Dict[str, LabMetaData]] = Field(
+ None,
+ description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
+ )
class GeneralSourceScript(ConfiguredBaseModel):
@@ -395,12 +440,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
- electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
- None, description="""Physical group of electrodes."""
- )
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
+ value: Optional[Dict[str, ElectrodeGroup]] = Field(
+ None, description="""Physical group of electrodes."""
+ )
class ExtracellularEphysElectrodes(DynamicTable):
@@ -556,9 +601,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
- intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
- None, description="""An intracellular electrode."""
- )
sweep_table: Optional[SweepTable] = Field(
None,
description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""",
@@ -583,6 +625,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""",
)
+ value: Optional[Dict[str, IntracellularElectrode]] = Field(
+ None, description="""An intracellular electrode."""
+ )
class NWBFileIntervals(ConfiguredBaseModel):
@@ -608,7 +653,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
- time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
+ value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)
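
The file-level model loses its name-keyed convenience slots (`lab_meta_data`, `electrode_group`, `intracellular_electrode`, `time_intervals`); each moves to the generic `value` slot of its parent group. Because `__getitem__` is widened to accept `str` alongside `int` and `slice`, named children stay reachable by subscription. A toy mirror of the accessor (illustrative, not the generated class):

    from typing import Any, Dict, Optional, Union
    from pydantic import BaseModel

    class WithValue(BaseModel):
        value: Optional[Dict[str, Any]] = None

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            # mirrors ConfiguredBaseModel.__getitem__ after the widening
            if self.value is not None:
                return self.value[val]
            raise KeyError(val)

    ephys = WithValue(value={"tetrode_1": object()})  # stand-in for an ElectrodeGroup
    assert ephys["tetrode_1"] is not None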
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py
index b500a82..bb6fee9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_5_0.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -227,11 +265,29 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- value: Optional[NDArray[Shape["* num_times"], float]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -246,12 +302,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
+ data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@@ -319,12 +375,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IZeroClampSeries(CurrentClampSeries):
@@ -479,6 +555,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -486,7 +580,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class VoltageClampSeries(PatchClampSeries):
@@ -499,13 +595,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
+ data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@@ -577,27 +673,6 @@ class VoltageClampSeries(PatchClampSeries):
)
-class VoltageClampSeriesData(ConfiguredBaseModel):
- """
- Recorded current.
- """
-
- linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
-
- name: Literal["data"] = Field(
- "data",
- json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
- )
- unit: Literal["amperes"] = Field(
- "amperes",
- description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
- json_schema_extra={
- "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
- },
- )
- value: Any = Field(...)
-
-
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@@ -650,6 +725,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
+class VoltageClampSeriesData(ConfiguredBaseModel):
+ """
+ Recorded current.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["amperes"] = Field(
+ "amperes",
+ description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@@ -854,12 +970,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IntracellularElectrode(NWBContainer):
@@ -910,15 +1046,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: VectorData[NDArray[Any, int]] = Field(
- ...,
- description="""Sweep number of the PatchClampSeries in that row.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
- )
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@@ -940,6 +1067,15 @@ class SweepTable(DynamicTable):
}
},
)
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
+ ...,
+ description="""Sweep number of the PatchClampSeries in that row.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@@ -1121,11 +1257,15 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
)
+ responses: IntracellularResponsesTable = Field(
+ ..., description="""Table for storing intracellular response related metadata."""
+ )
stimuli: IntracellularStimuliTable = Field(
..., description="""Table for storing intracellular stimulus related metadata."""
)
- responses: IntracellularResponsesTable = Field(
- ..., description="""Table for storing intracellular response related metadata."""
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
)
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
@@ -1466,9 +1606,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
-VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
+VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
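
Across the icephys `*SeriesData` classes, the untyped, required `value: Any = Field(...)` becomes an optional, shape-checked array: the wrapper no longer demands a payload at construction, but rejects wrongly-shaped ones. The relocation of `VoltageClampSeriesData` below the capacitance classes (and the reordered rebuild calls) only reflects the generator's new alphabetical ordering. A reduced numpydantic sketch of the tightened slot, assuming numpydantic >= 1.6 as pinned in the lockfile:

    from typing import Optional

    import numpy as np
    from numpydantic import NDArray, Shape
    from pydantic import BaseModel, ValidationError

    class Data(BaseModel):
        # illustrative stand-in for the regenerated ``value`` slot
        value: Optional[NDArray[Shape["* num_times"], float | int]] = None

    Data(value=np.arange(5))          # 1-D int: accepted
    Data(value=np.zeros(5))           # 1-D float: accepted
    try:
        Data(value=np.zeros((2, 3)))  # 2-D: rejected by the shape spec
    except ValidationError:
        pass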
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py
index 520d249..cce6df5 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import (
Image,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -73,12 +85,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -122,7 +159,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@@ -144,7 +181,7 @@ class RGBImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -174,7 +211,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -204,9 +241,7 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -220,8 +255,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -269,6 +305,47 @@ class ImageSeries(TimeSeries):
)
+class ImageSeriesData(ConfiguredBaseModel):
+ """
+ Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, * z"], float | int],
+ ]
+ ] = Field(None)
+
+
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
@@ -310,9 +387,7 @@ class ImageMaskSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -326,8 +401,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -385,6 +461,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
+ data: OpticalSeriesData = Field(
+ ..., description="""Images presented to subject, either grayscale or RGB"""
+ )
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@@ -393,10 +472,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
- ] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@@ -411,8 +486,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -460,6 +536,47 @@ class OpticalSeries(ImageSeries):
)
+class OpticalSeriesData(ConfiguredBaseModel):
+ """
+ Images presented to subject, either grayscale or RGB
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
+ ]
+ ] = Field(None)
+
+
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
@@ -470,10 +587,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Index of the image (using zero-indexing) in the linked Images object.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IndexSeriesData = Field(
+ ..., description="""Index of the image (using zero-indexing) in the linked Images object."""
)
indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
@@ -530,13 +645,52 @@ class IndexSeries(TimeSeries):
)
+class IndexSeriesData(ConfiguredBaseModel):
+ """
+ Index of the image (using zero-indexing) in the linked Images object.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["N/A"] = Field(
+ "N/A",
+ description="""This field is unused by IndexSeries and has the value N/A.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
+ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
+OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
+IndexSeriesData.model_rebuild()
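
The image module gets the same treatment for `ImageSeries`, `OpticalSeries`, and `IndexSeries`, and `format` now materializes the schema's implicit default (`'raw'`) rather than `None`. The `cast_with_value` rescue can re-wrap a bare array passed as `data` into `{"value": array}`, though wrappers with required attributes, such as `unit` here, still need those supplied. A usage sketch, assuming the generated v2_5_0 image models are importable:

    import numpy as np
    from nwb_models.models.pydantic.core.v2_5_0.core_nwb_image import ImageSeriesData

    frames = ImageSeriesData(
        value=np.zeros((2, 4, 4)),  # (frame, x, y)
        unit="n.a.",                # required base unit
    )
    assert frames.conversion == 1.0  # ifabsent default, now materialized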
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py
index 7901288..9e3fe7c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_5_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -213,6 +251,24 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@@ -220,8 +276,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
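
Widening the array dtype from ``float`` to ``float | int`` is behavioral, not cosmetic: integer-typed source arrays (e.g. raw int16 acquisition data) previously failed validation. A rough check of the pattern, assuming numpydantic's builtin-dtype handling:

    from typing import Optional

    import numpy as np
    from numpydantic import NDArray, Shape
    from pydantic import BaseModel

    class Datum(BaseModel):
        value: Optional[NDArray[Shape["* num_times"], float | int]] = None

    Datum(value=np.arange(4, dtype=np.int16))   # now accepted
    Datum(value=np.linspace(0.0, 1.0, num=4))   # floats validate as before
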
@@ -236,10 +292,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], str] = Field(
- ...,
- description="""Annotations made during an experiment.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: AnnotationSeriesData = Field(
+ ..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@@ -278,6 +332,47 @@ class AnnotationSeries(TimeSeries):
)
+class AnnotationSeriesData(ConfiguredBaseModel):
+ """
+ Annotations made during an experiment.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], str]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
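
Because ``data`` is now typed as ``AnnotationSeriesData`` rather than a bare array, the ``cast_with_value`` validator lets callers keep passing a plain array, which is retried as ``{"value": ...}``. A hedged sketch of the intended round trip (field names per the generated models; ``timestamps`` is inherited from ``TimeSeries``):

    import numpy as np

    series = AnnotationSeries(
        name="licks",
        data=np.array(["start", "stop"]),   # wrapped into AnnotationSeriesData.value
        timestamps=np.array([0.0, 1.0]),
    )
    assert series.data.unit == "n/a"    # fixed by the schema
    assert series.data[0] == "start"    # __getitem__ forwards to .value
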
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
@@ -288,10 +383,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Use values >0 if interval started, <0 if interval ended.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IntervalSeriesData = Field(
+ ..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@@ -330,6 +423,47 @@ class IntervalSeries(TimeSeries):
)
+class IntervalSeriesData(ConfiguredBaseModel):
+ """
+ Use values >0 if interval started, <0 if interval ended.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -417,24 +551,44 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
- None,
- json_schema_extra={
- "linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_times"},
- {"alias": "num_channels"},
- {"alias": "num_bands"},
- ]
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
+ Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_times"},
+ {"alias": "num_channels"},
+ {"alias": "num_bands"},
+ ]
+ }
}
- }
- },
+ },
+ )
)
@@ -504,9 +658,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
- spike_times_index: Optional[Named[VectorIndex]] = Field(
+ electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
- description="""Index into the spike_times dataset.""",
+ description="""Electrode group that each spike unit came from.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
+ electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ None,
+ description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -516,12 +679,9 @@ class Units(DynamicTable):
}
},
)
- spike_times: Optional[UnitsSpikeTimes] = Field(
- None, description="""Spike times for each unit."""
- )
- obs_intervals_index: Optional[Named[VectorIndex]] = Field(
+ electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into the obs_intervals dataset.""",
+ description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -547,9 +707,9 @@ class Units(DynamicTable):
},
)
)
- electrodes_index: Optional[Named[VectorIndex]] = Field(
+ obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into electrodes.""",
+ description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -559,9 +719,12 @@ class Units(DynamicTable):
}
},
)
- electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ spike_times: Optional[UnitsSpikeTimes] = Field(
+ None, description="""Spike times for each unit."""
+ )
+ spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
+ description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -571,41 +734,15 @@ class Units(DynamicTable):
}
},
)
- electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
- None,
- description="""Electrode group that each spike unit came from.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
+ waveform_mean: Optional[UnitsWaveformMean] = Field(
+ None, description="""Spike waveform mean for each spike unit."""
)
- waveform_mean: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
- Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
- )
+ waveform_sd: Optional[UnitsWaveformSd] = Field(
+ None, description="""Spike waveform standard deviation for each spike unit."""
+ )
+ waveforms: Optional[UnitsWaveforms] = Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -671,14 +808,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
+class UnitsWaveformMean(VectorData):
+ """
+ Spike waveform mean for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_mean"] = Field(
+ "waveform_mean",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveformSd(VectorData):
+ """
+ Spike waveform standard deviation for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_sd"] = Field(
+ "waveform_sd",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveforms(VectorData):
+ """
+ Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveforms"] = Field(
+ "waveforms",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
+AnnotationSeriesData.model_rebuild()
IntervalSeries.model_rebuild()
+IntervalSeriesData.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesBands.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimes.model_rebuild()
+UnitsWaveformMean.model_rebuild()
+UnitsWaveformSd.model_rebuild()
+UnitsWaveforms.model_rebuild()
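
The waveform columns are promoted from inline ``VectorData`` unions to dedicated classes (``UnitsWaveformMean``, ``UnitsWaveformSd``, ``UnitsWaveforms``) that carry their fixed ``sampling_rate``/``unit`` attributes. A sketch of building such a column, assuming ``description`` is required on ``DynamicTable`` subclasses as generated and using illustrative shapes:

    import numpy as np

    wm = UnitsWaveformMean(
        description="mean waveform per unit",
        sampling_rate=30000.0,          # hertz
        value=np.zeros((10, 82)),       # num_units x num_samples
    )
    units = Units(description="sorted units", waveform_mean=wm)
    assert wm.unit == "volts"           # fixed to 'volts' by the schema
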
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py
index e39e6b2..5d5b7fb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -121,10 +158,8 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], float] = Field(
- ...,
- description="""Applied power for optogenetic stimulus, in watts.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: OptogeneticSeriesData = Field(
+ ..., description="""Applied power for optogenetic stimulus, in watts."""
)
site: Union[OptogeneticStimulusSite, str] = Field(
...,
@@ -172,6 +207,45 @@ class OptogeneticSeries(TimeSeries):
)
+class OptogeneticSeriesData(ConfiguredBaseModel):
+ """
+ Applied power for optogenetic stimulus, in watts.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["watts"] = Field(
+ "watts",
+ description="""Unit of measurement for data, which is fixed to 'watts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@@ -202,4 +276,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
+OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
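
``OptogeneticSeries`` follows the same data-class pattern: its ``data`` slot now carries the fixed ``'watts'`` unit plus the standard conversion/offset/resolution attributes. Roughly, with illustrative names, and passing ``site`` as a string reference since the Union annotation allows it:

    import numpy as np

    data = OptogeneticSeriesData(value=np.array([0.0, 0.5, 0.5, 0.0]))
    assert data.unit == "watts" and data.conversion == 1.0

    stim = OptogeneticSeries(
        name="laser_pulse",
        data=data,
        site="stim_site",               # link by name via the Union[..., str]
        timestamps=np.arange(4.0),
    )
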
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py
index 6107f4c..cd6b4a6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_5_0.core_nwb_base import (
@@ -27,7 +28,7 @@ from ...core.v2_5_0.core_nwb_base import (
TimeSeriesSync,
)
from ...core.v2_5_0.core_nwb_device import Device
-from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
+from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -179,9 +217,7 @@ class TwoPhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -195,8 +231,9 @@ class TwoPhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -254,9 +291,7 @@ class RoiResponseSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(..., description="""Signals from ROIs.""")
+ data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
@@ -306,6 +341,47 @@ class RoiResponseSeries(TimeSeries):
)
+class RoiResponseSeriesData(ConfiguredBaseModel):
+ """
+ Signals from ROIs.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class DfOverF(NWBDataInterface):
"""
dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
@@ -315,10 +391,10 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}})
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class Fluorescence(NWBDataInterface):
@@ -330,10 +406,12 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}}
+ )
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class ImageSegmentation(NWBDataInterface):
@@ -345,10 +423,13 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "ImageSegmentation",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}},
+ )
value: Optional[Dict[str, PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
- name: str = Field(...)
class PlaneSegmentation(DynamicTable):
@@ -372,6 +453,10 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
+ pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ None,
+ description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ )
pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into pixel_mask.""",
@@ -384,9 +469,9 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
None,
- description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -400,10 +485,6 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
- None,
- description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
- )
reference_images: Optional[Dict[str, ImageSeries]] = Field(
None,
description="""Image stacks that the segmentation masks apply to.""",
@@ -664,10 +745,13 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "MotionCorrection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}},
+ )
value: Optional[Dict[str, CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
- name: str = Field(...)
class CorrectedImageStack(NWBDataInterface):
@@ -702,6 +786,7 @@ class CorrectedImageStack(NWBDataInterface):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TwoPhotonSeries.model_rebuild()
RoiResponseSeries.model_rebuild()
+RoiResponseSeriesData.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
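
Giving the container groups (``DfOverF``, ``Fluorescence``, ``ImageSegmentation``, ``MotionCorrection``) ``ifabsent`` name defaults means they can now be constructed without an explicit ``name``, matching their fixed group names in the NWB spec:

    dff = DfOverF()                 # name defaults to "DfOverF"
    assert dff.name == "DfOverF"

    seg = ImageSegmentation()       # likewise "ImageSegmentation"
    assert seg.name == "ImageSegmentation"
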
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
index b72f7b4..01bb0a3 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py
index 7aaa8a2..8704767 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_5_0.core_nwb_base import (
Image,
@@ -40,6 +40,7 @@ from ...core.v2_5_0.core_nwb_ecephys import (
ClusterWaveforms,
Clustering,
ElectricalSeries,
+ ElectricalSeriesData,
ElectrodeGroup,
ElectrodeGroupPosition,
EventDetection,
@@ -48,6 +49,7 @@ from ...core.v2_5_0.core_nwb_ecephys import (
FilteredEphys,
LFP,
SpikeEventSeries,
+ SpikeEventSeriesData,
)
from ...core.v2_5_0.core_nwb_epoch import TimeIntervals
from ...core.v2_5_0.core_nwb_file import (
@@ -101,9 +103,12 @@ from ...core.v2_5_0.core_nwb_image import (
GrayscaleImage,
ImageMaskSeries,
ImageSeries,
+ ImageSeriesData,
ImageSeriesExternalFile,
IndexSeries,
+ IndexSeriesData,
OpticalSeries,
+ OpticalSeriesData,
RGBAImage,
RGBImage,
)
@@ -111,14 +116,23 @@ from ...core.v2_5_0.core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
+ AnnotationSeriesData,
DecompositionSeries,
DecompositionSeriesBands,
DecompositionSeriesData,
IntervalSeries,
+ IntervalSeriesData,
Units,
UnitsSpikeTimes,
+ UnitsWaveformMean,
+ UnitsWaveformSd,
+ UnitsWaveforms,
+)
+from ...core.v2_5_0.core_nwb_ogen import (
+ OptogeneticSeries,
+ OptogeneticSeriesData,
+ OptogeneticStimulusSite,
)
-from ...core.v2_5_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from ...core.v2_5_0.core_nwb_ophys import (
CorrectedImageStack,
DfOverF,
@@ -134,6 +148,7 @@ from ...core.v2_5_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
RoiResponseSeries,
+ RoiResponseSeriesData,
TwoPhotonSeries,
)
from ...core.v2_5_0.core_nwb_retinotopy import (
@@ -175,7 +190,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -185,7 +200,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -196,7 +211,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -209,6 +224,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -219,12 +246,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
index 4837ae7..2d699e9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
@@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -91,12 +103,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -120,7 +157,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -359,9 +396,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
- NDArray[Shape["* x, * y"], float],
- NDArray[Shape["* x, * y, 3 r_g_b"], float],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
+ NDArray[Shape["* x, * y"], float | int],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@@ -551,13 +588,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of this collection of processed data."""
+ )
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
- name: str = Field(...)
class Images(NWBDataInterface):
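
Two behavioral notes on this file: ``ProcessingModule`` now requires a ``description``, and ``VectorDataMixin`` inherits from ``ConfiguredBaseModel``, so vector columns pick up the same rescue validators and forbid-extras config. For example:

    mod = ProcessingModule(
        name="behavior",
        description="processed behavioral timeseries",  # now a required field
    )
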
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
index 7e4ad59..90ce123 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -176,6 +213,24 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -183,10 +238,10 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, 1 x"], float],
- NDArray[Shape["* num_times, 2 x_y"], float],
- NDArray[Shape["* num_times, 3 x_y_z"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, 1 x"], float | int],
+ NDArray[Shape["* num_times, 2 x_y"], float | int],
+ NDArray[Shape["* num_times, 3 x_y_z"], float | int],
]
] = Field(None)
@@ -200,10 +255,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEpochs",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
+ )
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
- name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@@ -215,10 +273,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEvents",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@@ -230,10 +291,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralTimeSeries",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class PupilTracking(NWBDataInterface):
@@ -245,10 +309,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class EyeTracking(NWBDataInterface):
@@ -260,10 +326,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class CompassDirection(NWBDataInterface):
@@ -275,10 +343,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "CompassDirection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class Position(NWBDataInterface):
@@ -290,10 +361,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
# Model rebuild
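
With the `ifabsent` defaults added above, the behavior interface containers can be constructed without an explicit `name`. A hedged usage sketch; the import path assumes the generated `nwb_models` layout shown in this diff:

```python
from nwb_models.models.pydantic.core.v2_6_0_alpha.core_nwb_behavior import Position

# name now falls back to the ifabsent default instead of being required
pos = Position()
assert pos.name == "Position"
```
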
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
index c57186b..9ef9121 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
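
The `__pydantic_extra__` guard added to `coerce_subclass` matters because pydantic v2 leaves `__pydantic_extra__` as `None` whenever `extra` is not `"allow"`: under the new `extra="forbid"` config, the old `{**v.__dict__, **v.__pydantic_extra__}` spread raised `TypeError` and the subclass coercion was silently skipped. A toy demonstration (`Device`/`Probe` are illustrative names, not the generated classes):

```python
from pydantic import BaseModel, ConfigDict


class Device(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


class Probe(Device):
    pass


dev = Device(name="probe-0")
# No extras dict is allocated when extra="forbid"
assert dev.__pydantic_extra__ is None
# The new fallback branch: rebuild from __dict__ alone when there are no extras
probe = Probe(**dev.__dict__)
assert probe.name == "probe-0"
```
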
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
index 0529035..92800fd 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_6_0_alpha.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
- data: Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_channels"], float],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Recorded voltage data.""")
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
+ data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -215,6 +249,49 @@ class ElectricalSeries(TimeSeries):
)
+class ElectricalSeriesData(ConfiguredBaseModel):
+ """
+ Recorded voltage data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_channels"], float | int],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
@@ -225,10 +302,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_events, * num_samples"], float],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Spike waveforms.""")
+ data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@@ -238,6 +312,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -250,11 +329,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -287,6 +361,48 @@ class SpikeEventSeries(ElectricalSeries):
)
+class SpikeEventSeriesData(ConfiguredBaseModel):
+ """
+ Spike waveforms.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_events, * num_samples"], float | int],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@@ -385,10 +501,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
+ )
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
- name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@@ -400,10 +518,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
+ )
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class LFP(NWBDataInterface):
@@ -415,10 +535,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class ElectrodeGroup(NWBContainer):
@@ -561,7 +681,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
+ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
+SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
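
Promoting `data` from a bare `NDArray` union to the new `ElectricalSeriesData`/`SpikeEventSeriesData` models would ordinarily break callers that pass raw arrays; the `cast_with_value` wrap validator rescues them by retrying the input as `{"value": ...}`. A self-contained sketch of that rescue path with toy `Series`/`Data` models (pydantic v2 assumed):

```python
from typing import Any, List, Optional

from pydantic import BaseModel, field_validator


class Data(BaseModel):
    value: Optional[List[float]] = None


class Series(BaseModel):
    data: Data

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # First try the input as-is; on failure, retry wrapped as {"value": ...}
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1


s = Series(data=[1.0, 2.0])  # raw list is rescued as Data(value=[1.0, 2.0])
assert s.data.value == [1.0, 2.0]
```
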
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
index d3fa53b..5951970 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_6_0_alpha.core_nwb_base import TimeSeriesReferenceVectorData
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
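
The flip from `extra="allow"` to `extra="forbid"`, repeated in every module here, also changes error behavior: on models without a `value` slot, unknown kwargs now fail validation instead of being silently stored as extras. A toy illustration (`Epoch` is a stand-in, not a generated class):

```python
from pydantic import BaseModel, ConfigDict, ValidationError


class Epoch(BaseModel):
    model_config = ConfigDict(extra="forbid")
    start_time: float


try:
    Epoch(start_time=0.0, stop_tiem=1.0)  # note the typo in "stop_time"
except ValidationError as e:
    print(e)  # "Extra inputs are not permitted" for stop_tiem
```
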
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
index 975e51c..a4b7a33 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import (
Images,
@@ -34,7 +34,12 @@ from ...core.v2_6_0_alpha.core_nwb_icephys import (
from ...core.v2_6_0_alpha.core_nwb_misc import Units
from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_6_0_alpha.core_nwb_ophys import ImagingPlane
-from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
+from ...hdmf_common.v1_5_0.hdmf_common_table import (
+ DynamicTable,
+ ElementIdentifiers,
+ VectorData,
+ VectorIndex,
+)
metamodel_version = "None"
@@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +84,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +106,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -231,6 +273,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
+ specifications: Optional[dict] = Field(
+ None, description="""Nested dictionary of schema specifications"""
+ )
class NWBFileStimulus(ConfiguredBaseModel):
@@ -331,10 +376,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
- lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
- None,
- description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
- )
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@@ -360,6 +401,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
+ value: Optional[Dict[str, LabMetaData]] = Field(
+ None,
+ description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
+ )
class GeneralSourceScript(ConfiguredBaseModel):
@@ -395,12 +440,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
- electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
- None, description="""Physical group of electrodes."""
- )
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
+ value: Optional[Dict[str, ElectrodeGroup]] = Field(
+ None, description="""Physical group of electrodes."""
+ )
class ExtracellularEphysElectrodes(DynamicTable):
@@ -556,9 +601,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
- intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
- None, description="""An intracellular electrode."""
- )
sweep_table: Optional[SweepTable] = Field(
None,
description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""",
@@ -583,6 +625,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""",
)
+ value: Optional[Dict[str, IntracellularElectrode]] = Field(
+ None, description="""An intracellular electrode."""
+ )
class NWBFileIntervals(ConfiguredBaseModel):
@@ -608,7 +653,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
- time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
+ value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)
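
Renaming the open-ended dict slots (`lab_meta_data`, `electrode_group`, `intracellular_electrode`, `time_intervals`) to `value` is what wires them into `gather_extra_to_value` and the widened `__getitem__`. A self-contained round-trip sketch with a toy `Intervals` model (pydantic v2 assumed; the generated classes constrain the dict values more tightly):

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel, ConfigDict, model_validator


class Intervals(BaseModel):
    """Toy stand-in for NWBFileIntervals after the rename to `value`."""

    model_config = ConfigDict(extra="forbid")

    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # Unknown keys are packed into the `value` dict, as in the generated code
        if isinstance(v, dict):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" in v:
                    v["value"].update(extras)
                else:
                    v["value"] = extras
        return v

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        # String keys reach the dict-valued slot, mirroring the widened signature
        return self.value[val]


iv = Intervals(my_custom_table={"start_time": [0.0, 1.5]})
assert iv["my_custom_table"] == {"start_time": [0.0, 1.5]}
```
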
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
index 1f9c04b..a73acb9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_6_0_alpha.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -227,11 +265,29 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- value: Optional[NDArray[Shape["* num_times"], float]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -246,12 +302,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
+ data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@@ -319,12 +375,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IZeroClampSeries(CurrentClampSeries):
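
Replacing `value: Any` with a typed `NDArray` on the clamp-series data classes means shape and dtype are actually checked, and the `float | int` union accepts integer acquisitions as well. A small sketch assuming `numpydantic` is installed (`ClampData` is a toy, not the generated class):

```python
from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class ClampData(BaseModel):
    # Formerly `value: Any`; now a 1-D array checked for dtype float | int
    value: Optional[NDArray[Shape["* num_times"], float | int]] = None


ClampData(value=np.arange(5))            # integer samples now validate
ClampData(value=np.linspace(0.0, 1.0))   # floats as before
```
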
@@ -479,6 +555,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -486,7 +580,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class VoltageClampSeries(PatchClampSeries):
@@ -499,13 +595,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
+ data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@@ -577,27 +673,6 @@ class VoltageClampSeries(PatchClampSeries):
)
-class VoltageClampSeriesData(ConfiguredBaseModel):
- """
- Recorded current.
- """
-
- linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
-
- name: Literal["data"] = Field(
- "data",
- json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
- )
- unit: Literal["amperes"] = Field(
- "amperes",
- description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
- json_schema_extra={
- "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
- },
- )
- value: Any = Field(...)
-
-
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@@ -650,6 +725,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
+class VoltageClampSeriesData(ConfiguredBaseModel):
+ """
+ Recorded current.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["amperes"] = Field(
+ "amperes",
+ description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
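A quick sanity check on the `conversion` docstring's arithmetic, as a standalone sketch (values taken from the int16 example in the field description; not part of the generated module):

```python
import numpy as np

# Illustrative values from the docstring: int16 acquisition counts over a 5 V
# range (+/-2.5 V) behind an 8000x gain stage.
raw = np.array([-32768, 0, 32767], dtype=np.int16)
conversion = 2.5 / 32768 / 8000   # ~9.5367e-9 V per count, as stated above
offset = 0.0                      # no re-centering in this example

# "multiply 'data' by 'conversion' and add 'offset'"
volts = raw * conversion + offset
print(volts)  # ~[-3.125e-04, 0.0, 3.125e-04] -- i.e. +/-0.3125 mV at the electrode
```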
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@@ -854,12 +970,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
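For context on the recurring `float` → `float | int` dtype widening, a minimal numpydantic sketch (the `Data` model here is illustrative, not a generated class); assuming numpydantic's dtype-union semantics, integer-typed arrays now validate without an up-front cast:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class Data(BaseModel):  # illustrative stand-in for the generated *Data classes
    value: NDArray[Shape["* num_times"], float | int]

Data(value=np.arange(10, dtype=np.int32))  # accepted with the widened dtype union
Data(value=np.linspace(0.0, 1.0, 10))      # floats validate as before
```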
class IntracellularElectrode(NWBContainer):
@@ -910,15 +1046,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: VectorData[NDArray[Any, int]] = Field(
- ...,
- description="""Sweep number of the PatchClampSeries in that row.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
- )
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@@ -940,6 +1067,15 @@ class SweepTable(DynamicTable):
}
},
)
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
+ ...,
+ description="""Sweep number of the PatchClampSeries in that row.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@@ -1121,11 +1257,15 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
)
+ responses: IntracellularResponsesTable = Field(
+ ..., description="""Table for storing intracellular response related metadata."""
+ )
stimuli: IntracellularStimuliTable = Field(
..., description="""Table for storing intracellular stimulus related metadata."""
)
- responses: IntracellularResponsesTable = Field(
- ..., description="""Table for storing intracellular response related metadata."""
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
)
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
@@ -1466,9 +1606,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
-VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
+VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
index af69abe..50d66f2 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import (
Image,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
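A self-contained sketch of what `cast_with_value` rescues (toy `Wrapper`/`Outer` models, not the generated ones): a bare value passed where a `value`-slotted model is expected gets retried as `{"value": v}`:

```python
from typing import Any
from pydantic import BaseModel, field_validator

class Wrapper(BaseModel):         # stands in for a generated *Data class
    value: Any

class Outer(BaseModel):
    data: Wrapper

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        """Same shape as the generated validator above."""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1

# A bare list fails as Wrapper, is retried as {"value": [...]}, and succeeds.
assert Outer(data=[1, 2, 3]).data.value == [1, 2, 3]
```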
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -73,12 +85,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
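Why the `__pydantic_extra__` guard matters: with `extra="forbid"`, pydantic v2 leaves `__pydantic_extra__` as `None`, so the old `**v.__pydantic_extra__` raised a `TypeError` that the surrounding `except TypeError` (meant for non-class annotations) silently swallowed, skipping the coercion entirely. A minimal sketch with toy models:

```python
from pydantic import BaseModel, ConfigDict

class Parent(BaseModel):  # toy models, not the generated ones
    model_config = ConfigDict(extra="forbid")
    name: str

class Child(Parent):
    pass

p = Parent(name="probe0")
assert p.__pydantic_extra__ is None   # forbid => no extras dict at all
child = Child(**p.__dict__)           # the new None-safe branch: no ** on None
```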
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
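A runnable sketch of the `gather_extra_to_value` behavior with illustrative names (`Container` and `lick_times` are hypothetical): unknown kwargs are folded into `value` instead of tripping `extra="forbid"`:

```python
from typing import Any, Dict, Optional
from pydantic import BaseModel, ConfigDict, model_validator

class Container(BaseModel):  # toy model; field names are hypothetical
    model_config = ConfigDict(extra="forbid")
    name: str
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        """Same logic as the generated validator above."""
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {k: val for k, val in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                v.setdefault("value", {})
                v["value"].update(extras)
        return v

c = Container(name="behavior", lick_times=[0.1, 0.2])  # would raise without the validator
assert c.value == {"lick_times": [0.1, 0.2]}
```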
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -122,7 +159,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@@ -144,7 +181,7 @@ class RGBImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -174,7 +211,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -204,9 +241,7 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -220,8 +255,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -269,6 +305,47 @@ class ImageSeries(TimeSeries):
)
+class ImageSeriesData(ConfiguredBaseModel):
+ """
+ Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, * z"], float | int],
+ ]
+ ] = Field(None)
+
+
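Construction under the new wrapper, sketched with illustrative values (`"lumens"` is just an example unit): `data` now carries unit/conversion metadata next to the array instead of being a bare `NDArray`, and `__getitem__` forwards indexing to `value`:

```python
import numpy as np

frames = np.zeros((100, 64, 64), dtype=np.int16)  # (frame, x, y); ints now allowed
data = ImageSeriesData(
    unit="lumens",       # required by the wrapper; "lumens" is illustrative
    conversion=1.0,
    resolution=-1.0,     # unknown
    value=frames,
)
first_ten = data[0:10]   # ConfiguredBaseModel.__getitem__ forwards to .value
```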
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
@@ -310,9 +387,7 @@ class ImageMaskSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -326,8 +401,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -385,6 +461,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
+ data: OpticalSeriesData = Field(
+ ..., description="""Images presented to subject, either grayscale or RGB"""
+ )
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@@ -393,10 +472,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
- ] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@@ -411,8 +486,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -460,6 +536,47 @@ class OpticalSeries(ImageSeries):
)
+class OpticalSeriesData(ConfiguredBaseModel):
+ """
+ Images presented to subject, either grayscale or RGB
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
+ ]
+ ] = Field(None)
+
+
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
@@ -470,10 +587,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Index of the image (using zero-indexing) in the linked Images object.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IndexSeriesData = Field(
+ ..., description="""Index of the image (using zero-indexing) in the linked Images object."""
)
indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
@@ -530,13 +645,52 @@ class IndexSeries(TimeSeries):
)
+class IndexSeriesData(ConfiguredBaseModel):
+ """
+ Index of the image (using zero-indexing) in the linked Images object.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["N/A"] = Field(
+ "N/A",
+ description="""This field is unused by IndexSeries and has the value N/A.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
+ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
+OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
+IndexSeriesData.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
index 5c28736..e429b8c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_6_0_alpha.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -213,6 +251,24 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@@ -220,8 +276,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -236,10 +292,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], str] = Field(
- ...,
- description="""Annotations made during an experiment.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: AnnotationSeriesData = Field(
+ ..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@@ -278,6 +332,47 @@ class AnnotationSeries(TimeSeries):
)
+class AnnotationSeriesData(ConfiguredBaseModel):
+ """
+ Annotations made during an experiment.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], str]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
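A sketch of the pinned fields, assuming numpydantic accepts string-dtype arrays for the `str` NDArray annotation: `resolution` is clamped to exactly -1.0 by the `le`/`ge` bounds, and `unit` to the literal `"n/a"`:

```python
import numpy as np
from pydantic import ValidationError

ok = AnnotationSeriesData(value=np.array(["lick", "groom"]))  # defaults: unit="n/a", resolution=-1.0
try:
    AnnotationSeriesData(resolution=0.5)
except ValidationError:
    pass  # any value other than -1.0 violates le=-1/ge=-1
```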
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
@@ -288,10 +383,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Use values >0 if interval started, <0 if interval ended.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IntervalSeriesData = Field(
+ ..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@@ -330,6 +423,47 @@ class IntervalSeries(TimeSeries):
)
+class IntervalSeriesData(ConfiguredBaseModel):
+ """
+ Use values >0 if interval started, <0 if interval ended.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -417,24 +551,44 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
- None,
- json_schema_extra={
- "linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_times"},
- {"alias": "num_channels"},
- {"alias": "num_bands"},
- ]
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
+ Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_times"},
+ {"alias": "num_channels"},
+ {"alias": "num_bands"},
+ ]
+ }
}
- }
- },
+ },
+ )
)
@@ -504,9 +658,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
- spike_times_index: Optional[Named[VectorIndex]] = Field(
+ electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
- description="""Index into the spike_times dataset.""",
+ description="""Electrode group that each spike unit came from.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
+ electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ None,
+ description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -516,12 +679,9 @@ class Units(DynamicTable):
}
},
)
- spike_times: Optional[UnitsSpikeTimes] = Field(
- None, description="""Spike times for each unit in seconds."""
- )
- obs_intervals_index: Optional[Named[VectorIndex]] = Field(
+ electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into the obs_intervals dataset.""",
+ description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -547,9 +707,9 @@ class Units(DynamicTable):
},
)
)
- electrodes_index: Optional[Named[VectorIndex]] = Field(
+ obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into electrodes.""",
+ description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -559,9 +719,12 @@ class Units(DynamicTable):
}
},
)
- electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ spike_times: Optional[UnitsSpikeTimes] = Field(
+ None, description="""Spike times for each unit in seconds."""
+ )
+ spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
+ description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -571,41 +734,15 @@ class Units(DynamicTable):
}
},
)
- electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
- None,
- description="""Electrode group that each spike unit came from.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
+ waveform_mean: Optional[UnitsWaveformMean] = Field(
+ None, description="""Spike waveform mean for each spike unit."""
)
- waveform_mean: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
- Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
- )
+ waveform_sd: Optional[UnitsWaveformSd] = Field(
+ None, description="""Spike waveform standard deviation for each spike unit."""
+ )
+ waveforms: Optional[UnitsWaveforms] = Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -671,14 +808,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
+class UnitsWaveformMean(VectorData):
+ """
+ Spike waveform mean for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_mean"] = Field(
+ "waveform_mean",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveformSd(VectorData):
+ """
+ Spike waveform standard deviation for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_sd"] = Field(
+ "waveform_sd",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveforms(VectorData):
+ """
+ Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveforms"] = Field(
+ "waveforms",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
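The doubly-ragged layout in the `waveforms` description is easier to see decoded; a pure-Python sketch using the example offsets from the docstring above (no NWB API involved):

```python
# End offsets copied from the example in the description above.
waveforms_index_index = [2, 5, 6]        # per unit: end offset into waveforms_index
waveforms_index = [3, 6, 8, 10, 12, 13]  # per spike event: end offset into waveforms rows

def ragged_slices(ends):
    starts = [0] + list(ends[:-1])
    return [slice(s, e) for s, e in zip(starts, ends)]

unit_events = ragged_slices(waveforms_index_index)  # unit 0 -> events 0:2, unit 1 -> 2:5, unit 2 -> 5:6
event_rows = ragged_slices(waveforms_index)         # event 0 -> rows 0:3, event 1 -> 3:6, ...

# First spike event of the first unit spans waveform rows 0:3,
# i.e. 3 electrodes' waveforms, matching the docstring's walkthrough.
first_event = event_rows[unit_events[0].start]
assert (first_event.start, first_event.stop) == (0, 3)
```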
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
+AnnotationSeriesData.model_rebuild()
IntervalSeries.model_rebuild()
+IntervalSeriesData.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesBands.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimes.model_rebuild()
+UnitsWaveformMean.model_rebuild()
+UnitsWaveformSd.model_rebuild()
+UnitsWaveforms.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
index 42fe82f..797d656 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -121,10 +158,8 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], float] = Field(
- ...,
- description="""Applied power for optogenetic stimulus, in watts.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: OptogeneticSeriesData = Field(
+ ..., description="""Applied power for optogenetic stimulus, in watts."""
)
site: Union[OptogeneticStimulusSite, str] = Field(
...,
@@ -172,6 +207,45 @@ class OptogeneticSeries(TimeSeries):
)
+class OptogeneticSeriesData(ConfiguredBaseModel):
+ """
+ Applied power for optogenetic stimulus, in watts.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["watts"] = Field(
+ "watts",
+ description="""Unit of measurement for data, which is fixed to 'watts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@@ -202,4 +276,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
+OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
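With `data` now typed as `OptogeneticSeriesData` rather than a bare array union, the new `cast_with_value` wrap validator is what keeps plain-array call sites working: if direct validation fails, the input is retried as `{"value": v}`. A reduced sketch of that rescue path, using toy classes rather than the generated ones:

```python
from typing import Any, Optional

import numpy as np
from pydantic import BaseModel, field_validator


class ToyData(BaseModel):
    unit: str = "watts"
    value: Optional[Any] = None


class ToySeries(BaseModel):
    data: ToyData

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # Same shape as the generated validator: try the field as-is, then
        # retry with the input packed into the model's `value` slot.
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1


s = ToySeries(data=np.arange(3))  # bare array is rescued into ToyData.value
assert s.data.unit == "watts"
```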
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
index f6acd6c..98880c6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_6_0_alpha.core_nwb_base import (
@@ -27,7 +28,11 @@ from ...core.v2_6_0_alpha.core_nwb_base import (
TimeSeriesSync,
)
from ...core.v2_6_0_alpha.core_nwb_device import Device
-from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile
+from ...core.v2_6_0_alpha.core_nwb_image import (
+ ImageSeries,
+ ImageSeriesData,
+ ImageSeriesExternalFile,
+)
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
@@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +84,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +106,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -160,21 +202,21 @@ class OnePhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[float] = Field(
- None,
- description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
+ binning: Optional[int] = Field(
+ None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
)
exposure_time: Optional[float] = Field(
None, description="""Exposure time of the sample; often the inverse of the frequency."""
)
- binning: Optional[int] = Field(
- None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
- )
- power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
intensity: Optional[float] = Field(
None, description="""Intensity of the excitation in mW/mm^2, if known."""
)
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
+ scan_line_rate: Optional[float] = Field(
+ None,
+ description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
+ )
imaging_plane: Union[ImagingPlane, str] = Field(
...,
json_schema_extra={
@@ -184,9 +226,7 @@ class OnePhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -200,8 +240,9 @@ class OnePhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -278,9 +319,7 @@ class TwoPhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -294,8 +333,9 @@ class TwoPhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -353,9 +393,7 @@ class RoiResponseSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(..., description="""Signals from ROIs.""")
+ data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
@@ -405,6 +443,47 @@ class RoiResponseSeries(TimeSeries):
)
+class RoiResponseSeriesData(ConfiguredBaseModel):
+ """
+ Signals from ROIs.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class DfOverF(NWBDataInterface):
"""
dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
@@ -414,10 +493,10 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}})
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class Fluorescence(NWBDataInterface):
@@ -429,10 +508,12 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}}
+ )
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class ImageSegmentation(NWBDataInterface):
@@ -444,10 +525,13 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "ImageSegmentation",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}},
+ )
value: Optional[Dict[str, PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
- name: str = Field(...)
class PlaneSegmentation(DynamicTable):
@@ -471,6 +555,10 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
+ pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ None,
+ description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ )
pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into pixel_mask.""",
@@ -483,9 +571,9 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
None,
- description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -499,10 +587,6 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
- None,
- description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
- )
reference_images: Optional[Dict[str, ImageSeries]] = Field(
None,
description="""Image stacks that the segmentation masks apply to.""",
@@ -763,10 +847,13 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "MotionCorrection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}},
+ )
value: Optional[Dict[str, CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
- name: str = Field(...)
class CorrectedImageStack(NWBDataInterface):
@@ -802,6 +889,7 @@ class CorrectedImageStack(NWBDataInterface):
OnePhotonSeries.model_rebuild()
TwoPhotonSeries.model_rebuild()
RoiResponseSeries.model_rebuild()
+RoiResponseSeriesData.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
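Widening `__getitem__` to accept `str` pays off for the ophys containers above: `DfOverF`, `Fluorescence`, `ImageSegmentation`, and `MotionCorrection` all hold their children in a `value: Dict[str, ...]`, so string indexing can proxy straight through. A toy reduction of the same indexing logic:

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel


class ToyContainer(BaseModel):
    name: str = "Fluorescence"
    value: Optional[Dict[str, Any]] = None

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        # As in ConfiguredBaseModel: defer to `value` when it is populated.
        if self.value is not None:
            return self.value[val]
        raise KeyError(val)


c = ToyContainer(value={"roi_responses": [0.1, 0.2, 0.3]})
assert c["roi_responses"] == [0.1, 0.2, 0.3]
```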
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
index 3a085f7..1982ed6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
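The `gather_extra_to_value` pre-validator repeated in every module is the counterpart to switching `extra` from "allow" to "forbid": unknown kwargs are folded into the `value` dict before pydantic's extra-field check runs, instead of being rejected outright. A standalone sketch of the same logic on a toy model:

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class ToyModule(BaseModel):
    model_config = ConfigDict(extra="forbid")

    name: str = "toy"
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # Runs before extra-field enforcement, so unknown keys can still be
        # intercepted and stashed in `value`.
        if isinstance(v, dict):
            extras = {k: val for k, val in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                if v.get("value") is None:
                    v["value"] = extras
                else:
                    v["value"].update(extras)
        return v


m = ToyModule(series_a=[1, 2], series_b=[3, 4])  # would fail without the validator
assert m.value == {"series_a": [1, 2], "series_b": [3, 4]}
```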
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py
index 21b7046..deb4ce2 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_6_0_alpha.core_nwb_base import (
Image,
@@ -40,6 +40,7 @@ from ...core.v2_6_0_alpha.core_nwb_ecephys import (
ClusterWaveforms,
Clustering,
ElectricalSeries,
+ ElectricalSeriesData,
ElectrodeGroup,
ElectrodeGroupPosition,
EventDetection,
@@ -48,6 +49,7 @@ from ...core.v2_6_0_alpha.core_nwb_ecephys import (
FilteredEphys,
LFP,
SpikeEventSeries,
+ SpikeEventSeriesData,
)
from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals
from ...core.v2_6_0_alpha.core_nwb_file import (
@@ -102,9 +104,12 @@ from ...core.v2_6_0_alpha.core_nwb_image import (
GrayscaleImage,
ImageMaskSeries,
ImageSeries,
+ ImageSeriesData,
ImageSeriesExternalFile,
IndexSeries,
+ IndexSeriesData,
OpticalSeries,
+ OpticalSeriesData,
RGBAImage,
RGBImage,
)
@@ -112,14 +117,23 @@ from ...core.v2_6_0_alpha.core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
+ AnnotationSeriesData,
DecompositionSeries,
DecompositionSeriesBands,
DecompositionSeriesData,
IntervalSeries,
+ IntervalSeriesData,
Units,
UnitsSpikeTimes,
+ UnitsWaveformMean,
+ UnitsWaveformSd,
+ UnitsWaveforms,
+)
+from ...core.v2_6_0_alpha.core_nwb_ogen import (
+ OptogeneticSeries,
+ OptogeneticSeriesData,
+ OptogeneticStimulusSite,
)
-from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from ...core.v2_6_0_alpha.core_nwb_ophys import (
CorrectedImageStack,
DfOverF,
@@ -136,6 +150,7 @@ from ...core.v2_6_0_alpha.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
RoiResponseSeries,
+ RoiResponseSeriesData,
TwoPhotonSeries,
)
from ...core.v2_6_0_alpha.core_nwb_retinotopy import (
@@ -177,7 +192,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -187,7 +202,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -198,7 +213,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -211,6 +226,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -221,12 +248,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
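Because `namespace.py` imports every model for the version, the newly split-out `*Data` classes become importable from a single module; for example (import path inferred from the file layout in this diff):

```python
# Assumes the package layout shown in the diff headers above.
from nwb_models.models.pydantic.core.v2_6_0_alpha.namespace import (
    ElectricalSeriesData,
    ImageSeriesData,
    OptogeneticSeriesData,
    RoiResponseSeriesData,
)
```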
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py
index a645a2f..6c8a7fb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py
@@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -91,12 +103,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -120,7 +157,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -359,9 +396,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
- NDArray[Shape["* x, * y"], float],
- NDArray[Shape["* x, * y, 3 r_g_b"], float],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
+ NDArray[Shape["* x, * y"], float | int],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@@ -551,13 +588,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description of this collection of processed data."""
+ )
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
- name: str = Field(...)
class Images(NWBDataInterface):
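The `None` check added to `coerce_subclass` guards a real consequence of the config change: with `extra="forbid"`, pydantic leaves `__pydantic_extra__` as `None` rather than `{}`, so the old unconditional `{**v.__dict__, **v.__pydantic_extra__}` would raise `TypeError`. A quick demonstration:

```python
from pydantic import BaseModel, ConfigDict


class Allowing(BaseModel):
    model_config = ConfigDict(extra="allow")


class Forbidding(BaseModel):
    model_config = ConfigDict(extra="forbid")


print(Allowing().__pydantic_extra__)    # {} -> safe to splat into a dict
print(Forbidding().__pydantic_extra__)  # None -> {**None} raises TypeError
```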
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py
index 836c2e2..d22df1e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import (
NWBDataInterface,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -176,6 +213,24 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -183,10 +238,10 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, 1 x"], float],
- NDArray[Shape["* num_times, 2 x_y"], float],
- NDArray[Shape["* num_times, 3 x_y_z"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, 1 x"], float | int],
+ NDArray[Shape["* num_times, 2 x_y"], float | int],
+ NDArray[Shape["* num_times, 3 x_y_z"], float | int],
]
] = Field(None)
@@ -200,10 +255,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEpochs",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
+ )
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
- name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@@ -215,10 +273,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralEvents",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@@ -230,10 +291,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "BehavioralTimeSeries",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class PupilTracking(NWBDataInterface):
@@ -245,10 +309,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
+ )
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
- name: str = Field(...)
class EyeTracking(NWBDataInterface):
@@ -260,10 +326,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class CompassDirection(NWBDataInterface):
@@ -275,10 +343,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "CompassDirection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
class Position(NWBDataInterface):
@@ -290,10 +361,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
+ name: str = Field(
+ "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
+ )
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
- name: str = Field(...)
# Model rebuild
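With the `ifabsent` name defaults above, the behavior interface containers no longer need an explicit name at construction; for example (module path inferred from this diff's file layout):

```python
from nwb_models.models.pydantic.core.v2_7_0.core_nwb_behavior import Position

pos = Position()  # name now defaults via ifabsent instead of being required
assert pos.name == "Position"
```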
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py
index 59b53c8..0e96640 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import NWBContainer
@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
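The ecephys diff below introduces `ElectricalSeriesData`, whose docstrings spell out the full scaling chain for recovering volts. In numbers, reusing the docstring's own worked conversion factor plus a hypothetical per-channel factor:

```python
import numpy as np

# Raw acquisition counts, shape (num_times, num_channels).
raw = np.array([[100, -200], [300, -400]], dtype=np.int16)

conversion = 2.5 / 32768 / 8000            # global scalar from the docstring example
channel_conversion = np.array([1.0, 0.5])  # hypothetical per-channel factors
offset = 0.0

# data in Volts = data * data.conversion * channel_conversion + offset
volts = raw * conversion * channel_conversion + offset
```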
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
index dc96a98..85141cf 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_7_0.core_nwb_base import (
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
- data: Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_channels"], float],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Recorded voltage data.""")
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
+ data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -215,6 +249,49 @@ class ElectricalSeries(TimeSeries):
)
+class ElectricalSeriesData(ConfiguredBaseModel):
+ """
+ Recorded voltage data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_channels"], float | int],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
@@ -225,10 +302,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_events, * num_samples"], float],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
- ] = Field(..., description="""Spike waveforms.""")
+ data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@@ -238,6 +312,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
+ None,
+ description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
+ )
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@@ -250,11 +329,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
- None,
- description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
- )
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@@ -287,6 +361,48 @@ class SpikeEventSeries(ElectricalSeries):
)
+class SpikeEventSeriesData(ConfiguredBaseModel):
+ """
+ Spike waveforms.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["volts"] = Field(
+ "volts",
+ description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_events, * num_samples"], float | int],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
+ ]
+ ] = Field(None)
+
+
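This hunk replaces the bare NDArray union on SpikeEventSeries.data with a dedicated SpikeEventSeriesData sub-model that carries the unit/conversion/offset/resolution attributes alongside the array, and the dtype widens from float to float | int. A minimal usage sketch; the import path is inferred from the file being diffed, and the scaling follows the 'conversion'/'channel_conversion' descriptions above:

```python
# Hypothetical sketch: field names come from the diff above; the module path
# is an assumption based on the generated file layout.
import numpy as np
from nwb_models.models.pydantic.core.v2_7_0.core_nwb_ecephys import SpikeEventSeriesData

waveforms = np.random.rand(10, 4, 32)  # [num_events, num_channels, num_samples]
data = SpikeEventSeriesData(value=waveforms, conversion=9.5367e-9)

# per-channel factors broadcast along the channel axis, per the
# channel_conversion description: volts = data * conversion * channel_conversion
channel_conversion = np.array([1.0, 1.0, 0.5, 2.0])
volts = data.value * data.conversion * channel_conversion[None, :, None]
```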
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@@ -385,10 +501,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
+ )
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
- name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@@ -400,10 +518,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field(
+ "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
+ )
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
class LFP(NWBDataInterface):
@@ -415,10 +535,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
+ name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
- name: str = Field(...)
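These three hunks give the tree-root interfaces (EventWaveform, FilteredEphys, LFP) their canonical default names via ifabsent metadata instead of a required name. A sketch of the resulting behavior, assuming the module path below and that the parent classes add no other required fields:

```python
# Hedged sketch: default names on tree-root containers; import path assumed.
from nwb_models.models.pydantic.core.v2_7_0.core_nwb_ecephys import LFP

lfp = LFP()                 # previously name was required: Field(...)
assert lfp.name == "LFP"    # supplied by ifabsent: string(LFP)
```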
class ElectrodeGroup(NWBContainer):
@@ -561,7 +681,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
+ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
+SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py
index e8b5539..e5cc476 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_7_0.core_nwb_base import TimeSeriesReferenceVectorData
@@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
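Taken together, the switch to extra="forbid" plus these validators preserves the old permissive construction: cast_with_value retries a failed field as {"value": v}, gather_extra_to_value packs unknown kwargs into the value dict, and the str-capable __getitem__ reads them back. A self-contained sketch of the pattern (illustrative model names, pydantic v2 assumed; not the generated classes themselves):

```python
from typing import Any, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, field_validator, model_validator

class Base(BaseModel):
    model_config = ConfigDict(extra="forbid")

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        raise KeyError(val)

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        """Retry a failed field validation with the input cast into 'value'."""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        """Pack unknown kwargs into 'value' so extra='forbid' doesn't reject them."""
        if "value" in cls.model_fields and isinstance(v, dict):
            extras = {k: val for k, val in v.items() if k not in cls.model_fields}
            for k in extras:
                del v[k]
            if extras:
                v.setdefault("value", {}).update(extras)
        return v

class Data(Base):
    unit: str = "volts"
    value: Optional[Any] = None

class Series(Base):
    data: Optional[Data] = None
    value: Optional[Dict[str, Any]] = None

s = Series(data=[1, 2, 3], my_series="extra item")
assert s.data.value == [1, 2, 3]       # bare value rescued by cast_with_value
assert s["my_series"] == "extra item"  # unknown kwarg gathered into 'value'
```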
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py
index 038a4ae..2b52d85 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import (
Images,
@@ -34,7 +34,12 @@ from ...core.v2_7_0.core_nwb_icephys import (
from ...core.v2_7_0.core_nwb_misc import Units
from ...core.v2_7_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_7_0.core_nwb_ophys import ImagingPlane
-from ...hdmf_common.v1_8_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
+from ...hdmf_common.v1_8_0.hdmf_common_table import (
+ DynamicTable,
+ ElementIdentifiers,
+ VectorData,
+ VectorIndex,
+)
metamodel_version = "None"
@@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +84,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +106,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -231,6 +273,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
+ specifications: Optional[dict] = Field(
+ None, description="""Nested dictionary of schema specifications"""
+ )
class NWBFileStimulus(ConfiguredBaseModel):
@@ -339,10 +384,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
- lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
- None,
- description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
- )
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@@ -368,6 +409,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
+ value: Optional[Dict[str, LabMetaData]] = Field(
+ None,
+ description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
+ )
class GeneralSourceScript(ConfiguredBaseModel):
@@ -403,12 +448,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
- electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
- None, description="""Physical group of electrodes."""
- )
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
+ value: Optional[Dict[str, ElectrodeGroup]] = Field(
+ None, description="""Physical group of electrodes."""
+ )
class ExtracellularEphysElectrodes(DynamicTable):
@@ -564,9 +609,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
- intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
- None, description="""An intracellular electrode."""
- )
sweep_table: Optional[SweepTable] = Field(
None,
description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""",
@@ -591,6 +633,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""",
)
+ value: Optional[Dict[str, IntracellularElectrode]] = Field(
+ None, description="""An intracellular electrode."""
+ )
class NWBFileIntervals(ConfiguredBaseModel):
@@ -616,7 +661,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
- time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
+ value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py
index c1818b4..7a77474 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_7_0.core_nwb_base import (
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -227,11 +265,29 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- value: Optional[NDArray[Shape["* num_times"], float]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -246,12 +302,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
+ data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@@ -319,12 +375,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IZeroClampSeries(CurrentClampSeries):
@@ -479,6 +555,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
@@ -486,7 +580,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
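Several of the *Data classes in this file drop `value: Any = Field(...)` in favor of a typed, optional numpydantic array admitting float or int dtypes. A standalone sketch of what the tightened annotation now checks (illustrative class name, not from the generated files):

```python
# Illustrative: shape/dtype validation via numpydantic, mirroring the new
# 'value' annotation used in the hunks above.
from typing import Optional
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class Data(BaseModel):
    value: Optional[NDArray[Shape["* num_times"], float | int]] = None

Data(value=np.arange(5))              # 1-D int array: accepted
Data(value=np.linspace(0.0, 1.0, 5))  # 1-D float array: accepted
# Data(value=np.zeros((2, 2)))        # 2-D array would fail shape validation
```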
class VoltageClampSeries(PatchClampSeries):
@@ -499,13 +595,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
+ data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@@ -577,27 +673,6 @@ class VoltageClampSeries(PatchClampSeries):
)
-class VoltageClampSeriesData(ConfiguredBaseModel):
- """
- Recorded current.
- """
-
- linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
-
- name: Literal["data"] = Field(
- "data",
- json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
- )
- unit: Literal["amperes"] = Field(
- "amperes",
- description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
- json_schema_extra={
- "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
- },
- )
- value: Any = Field(...)
-
-
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@@ -650,6 +725,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
+class VoltageClampSeriesData(ConfiguredBaseModel):
+ """
+ Recorded current.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["amperes"] = Field(
+ "amperes",
+ description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
+ )
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@@ -854,12 +970,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
- value: Any = Field(...)
+ value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
class IntracellularElectrode(NWBContainer):
@@ -910,15 +1046,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: VectorData[NDArray[Any, int]] = Field(
- ...,
- description="""Sweep number of the PatchClampSeries in that row.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
- )
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@@ -940,6 +1067,15 @@ class SweepTable(DynamicTable):
}
},
)
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
+ ...,
+ description="""Sweep number of the PatchClampSeries in that row.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@@ -1133,11 +1269,15 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
)
+ responses: IntracellularResponsesTable = Field(
+ ..., description="""Table for storing intracellular response related metadata."""
+ )
stimuli: IntracellularStimuliTable = Field(
..., description="""Table for storing intracellular stimulus related metadata."""
)
- responses: IntracellularResponsesTable = Field(
- ..., description="""Table for storing intracellular response related metadata."""
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
)
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
@@ -1478,9 +1618,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
-VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
+VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py
index 6e97172..66edd17 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import (
Image,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -73,12 +85,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -122,7 +159,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@@ -144,7 +181,7 @@ class RGBImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -174,7 +211,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
+ value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -204,9 +241,7 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -220,8 +255,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -269,6 +305,47 @@ class ImageSeries(TimeSeries):
)
+class ImageSeriesData(ConfiguredBaseModel):
+ """
+ Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, * z"], float | int],
+ ]
+ ] = Field(None)
+
+
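ImageSeries.data likewise moves into an ImageSeriesData wrapper, and the parent's format attribute now defaults to "raw" per the ifabsent metadata a few hunks above. A hedged construction sketch; the import path is inferred from the file being diffed:

```python
# Hypothetical sketch; field names come from the class above.
import numpy as np
from nwb_models.models.pydantic.core.v2_7_0.core_nwb_image import ImageSeriesData

# in-file storage: the parent ImageSeries' format now defaults to "raw"
frames = ImageSeriesData(unit="n.a.", value=np.zeros((2, 64, 64)))

# external storage: per the description, data should be an empty 3-D array
external = ImageSeriesData(unit="n.a.", value=np.empty((0, 0, 0)))
```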
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
@@ -310,9 +387,7 @@ class ImageMaskSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -326,8 +401,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -385,6 +461,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
+ data: OpticalSeriesData = Field(
+ ..., description="""Images presented to subject, either grayscale or RGB"""
+ )
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@@ -393,10 +472,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
- ] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@@ -411,8 +486,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -460,6 +536,47 @@ class OpticalSeries(ImageSeries):
)
+class OpticalSeriesData(ConfiguredBaseModel):
+ """
+ Images presented to subject, either grayscale or RGB
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* frame, * x, * y"], float | int],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
+ ]
+ ] = Field(None)
+
+
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
@@ -470,10 +587,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Index of the image (using zero-indexing) in the linked Images object.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IndexSeriesData = Field(
+ ..., description="""Index of the image (using zero-indexing) in the linked Images object."""
)
indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
@@ -530,13 +645,52 @@ class IndexSeries(TimeSeries):
)
+class IndexSeriesData(ConfiguredBaseModel):
+ """
+ Index of the image (using zero-indexing) in the linked Images object.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""")
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""This field is unused by IndexSeries.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["N/A"] = Field(
+ "N/A",
+ description="""This field is unused by IndexSeries and has the value N/A.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
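IndexSeriesData pins unit to "N/A" and declares conversion/offset/resolution unused, since its values are frame indices rather than measurements. A small sketch (import path assumed as above):

```python
# Illustrative only; defaults follow the field definitions above.
import numpy as np
from nwb_models.models.pydantic.core.v2_7_0.core_nwb_image import IndexSeriesData

idx = IndexSeriesData(value=np.arange(10))  # zero-based indices into an Images object
assert idx.unit == "N/A" and idx.name == "data"
```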
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
+ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
+OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
+IndexSeriesData.model_rebuild()
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py
index 1eb2c3a..513f113 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_7_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -213,6 +251,24 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@@ -220,8 +276,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
- NDArray[Shape["* num_times"], float],
- NDArray[Shape["* num_times, * num_features"], float],
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@@ -236,10 +292,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], str] = Field(
- ...,
- description="""Annotations made during an experiment.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: AnnotationSeriesData = Field(
+ ..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@@ -278,6 +332,47 @@ class AnnotationSeries(TimeSeries):
)
+class AnnotationSeriesData(ConfiguredBaseModel):
+ """
+ Annotations made during an experiment.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], str]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
@@ -288,10 +383,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], int] = Field(
- ...,
- description="""Use values >0 if interval started, <0 if interval ended.""",
- json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
+ data: IntervalSeriesData = Field(
+ ..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@@ -330,6 +423,47 @@ class IntervalSeries(TimeSeries):
)
+class IntervalSeriesData(ConfiguredBaseModel):
+ """
+ Use values >0 if interval started, <0 if interval ended.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: float = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
+ le=-1,
+ ge=-1,
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["n/a"] = Field(
+ "n/a",
+ description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
+ )
+ value: Optional[NDArray[Shape["* num_times"], int]] = Field(
+ None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
+ )
+
+
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -417,24 +551,44 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
- None,
- json_schema_extra={
- "linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_times"},
- {"alias": "num_channels"},
- {"alias": "num_bands"},
- ]
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
+ Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_times"},
+ {"alias": "num_channels"},
+ {"alias": "num_bands"},
+ ]
+ }
}
- }
- },
+ },
+ )
)
@@ -504,9 +658,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
- spike_times_index: Optional[Named[VectorIndex]] = Field(
+ electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
- description="""Index into the spike_times dataset.""",
+ description="""Electrode group that each spike unit came from.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
+ }
+ },
+ )
+ electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ None,
+ description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -516,12 +679,9 @@ class Units(DynamicTable):
}
},
)
- spike_times: Optional[UnitsSpikeTimes] = Field(
- None, description="""Spike times for each unit in seconds."""
- )
- obs_intervals_index: Optional[Named[VectorIndex]] = Field(
+ electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into the obs_intervals dataset.""",
+ description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -547,9 +707,9 @@ class Units(DynamicTable):
},
)
)
- electrodes_index: Optional[Named[VectorIndex]] = Field(
+ obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Index into electrodes.""",
+ description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -559,9 +719,12 @@ class Units(DynamicTable):
}
},
)
- electrodes: Optional[Named[DynamicTableRegion]] = Field(
+ spike_times: Optional[UnitsSpikeTimes] = Field(
+ None, description="""Spike times for each unit in seconds."""
+ )
+ spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
- description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
+ description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@@ -571,41 +734,15 @@ class Units(DynamicTable):
}
},
)
- electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
- None,
- description="""Electrode group that each spike unit came from.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
- }
- },
+ waveform_mean: Optional[UnitsWaveformMean] = Field(
+ None, description="""Spike waveform mean for each spike unit."""
)
- waveform_mean: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- VectorData[
- Union[
- NDArray[Shape["* num_units, * num_samples"], float],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
- ]
- ]
- ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
- Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
- )
+ waveform_sd: Optional[UnitsWaveformSd] = Field(
+ None, description="""Spike waveform standard deviation for each spike unit."""
+ )
+ waveforms: Optional[UnitsWaveforms] = Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -671,14 +808,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
+class UnitsWaveformMean(VectorData):
+ """
+ Spike waveform mean for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_mean"] = Field(
+ "waveform_mean",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveformSd(VectorData):
+ """
+ Spike waveform standard deviation for each spike unit.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveform_sd"] = Field(
+ "waveform_sd",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class UnitsWaveforms(VectorData):
+ """
+ Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
+
+ name: Literal["waveforms"] = Field(
+ "waveforms",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
+ },
+ )
+ sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
+ unit: Optional[Literal["volts"]] = Field(
+ "volts",
+ description="""Unit of measurement. This value is fixed to 'volts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
+AnnotationSeriesData.model_rebuild()
IntervalSeries.model_rebuild()
+IntervalSeriesData.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesBands.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimes.model_rebuild()
+UnitsWaveformMean.model_rebuild()
+UnitsWaveformSd.model_rebuild()
+UnitsWaveforms.model_rebuild()
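
Since `extra="forbid"` would otherwise reject the dynamic columns NWB permits on these containers, the new `gather_extra_to_value` model validator reroutes unknown kwargs into the `value` slot before field validation runs. A trimmed, runnable reproduction of the validator as added in each of these modules (the `Container` class is illustrative; note the generated code assumes any pre-existing `value` is dict-like):

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class Container(BaseModel):
    # stand-in for a generated container with a dynamic `value` slot
    model_config = ConfigDict(extra="forbid")
    name: str = "units"
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        """Pack unknown kwargs into `value` instead of tripping extra='forbid'."""
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" in v:
                    v["value"].update(extras)  # assumes an existing dict-like value
                else:
                    v["value"] = extras
        return v


c = Container(name="units", spike_rate=[1.0, 2.0])  # unknown column survives
assert c.value == {"spike_rate": [1.0, 2.0]}
```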
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py
index 626a28c..8e6d7c3 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import (
NWBContainer,
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -121,9 +158,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(
+ data: OptogeneticSeriesData = Field(
...,
description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""",
)
@@ -173,6 +208,48 @@ class OptogeneticSeries(TimeSeries):
)
+class OptogeneticSeriesData(ConfiguredBaseModel):
+ """
+ Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: Literal["watts"] = Field(
+ "watts",
+ description="""Unit of measurement for data, which is fixed to 'watts'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@@ -203,4 +280,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
+OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
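
The array annotations here, as throughout this changeset, widen from `float` to `float | int`, so integer-typed stimulus data no longer needs an explicit cast to validate. A small sketch of the effect, assuming numpydantic >= 1.6.0 as pinned in the lockfile (the `Stimulus` model is illustrative; the generated classes hold the array in a `value` slot):

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class Stimulus(BaseModel):
    value: NDArray[Shape["* num_times"], float | int]


Stimulus(value=np.array([0.0, 0.5, 1.0]))  # float array validates as before
Stimulus(value=np.array([0, 1, 2]))        # int array now validates too
```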
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py
index d462064..5fdf2a0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
+ model_validator,
)
from ...core.v2_7_0.core_nwb_base import (
@@ -27,7 +28,7 @@ from ...core.v2_7_0.core_nwb_base import (
TimeSeriesSync,
)
from ...core.v2_7_0.core_nwb_device import Device
-from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
+from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile
from ...hdmf_common.v1_8_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
@@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -160,21 +198,21 @@ class OnePhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[float] = Field(
- None,
- description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
+ binning: Optional[int] = Field(
+ None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
)
exposure_time: Optional[float] = Field(
None, description="""Exposure time of the sample; often the inverse of the frequency."""
)
- binning: Optional[int] = Field(
- None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
- )
- power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
intensity: Optional[float] = Field(
None, description="""Intensity of the excitation in mW/mm^2, if known."""
)
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
+ scan_line_rate: Optional[float] = Field(
+ None,
+ description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
+ )
imaging_plane: Union[ImagingPlane, str] = Field(
...,
json_schema_extra={
@@ -184,9 +222,7 @@ class OnePhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -200,8 +236,9 @@ class OnePhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -278,9 +315,7 @@ class TwoPhotonSeries(ImageSeries):
}
},
)
- data: Union[
- NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
- ] = Field(
+ data: ImageSeriesData = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
@@ -294,8 +329,9 @@ class TwoPhotonSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
- None,
+ "raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@@ -353,9 +389,7 @@ class RoiResponseSeries(TimeSeries):
)
name: str = Field(...)
- data: Union[
- NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
- ] = Field(..., description="""Signals from ROIs.""")
+ data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
@@ -405,6 +439,47 @@ class RoiResponseSeries(TimeSeries):
)
+class RoiResponseSeriesData(ConfiguredBaseModel):
+ """
+ Signals from ROIs.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ offset: Optional[float] = Field(
+ None,
+ description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
+ )
+ resolution: Optional[float] = Field(
+ -1.0,
+ description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
+ )
+ unit: str = Field(
+ ...,
+ description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ )
+ continuity: Optional[str] = Field(
+ None,
+ description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* num_times"], float | int],
+ NDArray[Shape["* num_times, * num_rois"], float | int],
+ ]
+ ] = Field(None)
+
+
class DfOverF(NWBDataInterface):
"""
dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
@@ -414,10 +489,10 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}})
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class Fluorescence(NWBDataInterface):
@@ -429,10 +504,12 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}}
+ )
value: Optional[Dict[str, RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
- name: str = Field(...)
class ImageSegmentation(NWBDataInterface):
@@ -444,10 +521,13 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "ImageSegmentation",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}},
+ )
value: Optional[Dict[str, PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
- name: str = Field(...)
class PlaneSegmentation(DynamicTable):
@@ -471,6 +551,10 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
+ pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ None,
+ description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ )
pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into pixel_mask.""",
@@ -483,9 +567,9 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
+ voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
None,
- description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
+ description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None,
@@ -499,10 +583,6 @@ class PlaneSegmentation(DynamicTable):
}
},
)
- voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
- None,
- description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
- )
reference_images: Optional[Dict[str, ImageSeries]] = Field(
None,
description="""Image stacks that the segmentation masks apply to.""",
@@ -763,10 +843,13 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
+ name: str = Field(
+ "MotionCorrection",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}},
+ )
value: Optional[Dict[str, CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
- name: str = Field(...)
class CorrectedImageStack(NWBDataInterface):
@@ -802,6 +885,7 @@ class CorrectedImageStack(NWBDataInterface):
OnePhotonSeries.model_rebuild()
TwoPhotonSeries.model_rebuild()
RoiResponseSeries.model_rebuild()
+RoiResponseSeriesData.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
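
Beyond the `*Data` promotion, the ophys containers (`DfOverF`, `Fluorescence`, `ImageSegmentation`, `MotionCorrection`) swap a required `name` for a schema-derived default via `ifabsent`. A minimal sketch of the behavioral change, with the class trimmed to the relevant field:

```python
from pydantic import BaseModel, Field


class DfOverF(BaseModel):
    # before: name: str = Field(...)  -> DfOverF() raised a validation error
    # after: the schema default applies whenever the name is omitted
    name: str = Field(
        "DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}
    )


assert DfOverF().name == "DfOverF"
```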
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
index 26f2f92..8affbfc 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import NWBDataInterface
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
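
The `coerce_subclass` tweak repeated in each module is a direct consequence of the `extra="forbid"` switch: pydantic only materializes `__pydantic_extra__` as a dict under `extra="allow"`, so the old unconditional merge would now raise `TypeError` on every upcast. A short demonstration of the failure mode the new guard avoids (`Parent`/`Child` are illustrative):

```python
from pydantic import BaseModel, ConfigDict


class Parent(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


class Child(Parent):
    description: str = ""


p = Parent(name="a")
assert p.__pydantic_extra__ is None  # no extras dict under extra="forbid"

# old path: Child(**{**p.__dict__, **p.__pydantic_extra__}) -> TypeError
if p.__pydantic_extra__:
    child = Child(**{**p.__dict__, **p.__pydantic_extra__})
else:
    child = Child(**p.__dict__)  # guarded path taken here
assert isinstance(child, Child)
```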
diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py
index 5747cde..68e374e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_7_0.core_nwb_base import (
Image,
@@ -40,6 +40,7 @@ from ...core.v2_7_0.core_nwb_ecephys import (
ClusterWaveforms,
Clustering,
ElectricalSeries,
+ ElectricalSeriesData,
ElectrodeGroup,
ElectrodeGroupPosition,
EventDetection,
@@ -48,6 +49,7 @@ from ...core.v2_7_0.core_nwb_ecephys import (
FilteredEphys,
LFP,
SpikeEventSeries,
+ SpikeEventSeriesData,
)
from ...core.v2_7_0.core_nwb_epoch import TimeIntervals
from ...core.v2_7_0.core_nwb_file import (
@@ -102,9 +104,12 @@ from ...core.v2_7_0.core_nwb_image import (
GrayscaleImage,
ImageMaskSeries,
ImageSeries,
+ ImageSeriesData,
ImageSeriesExternalFile,
IndexSeries,
+ IndexSeriesData,
OpticalSeries,
+ OpticalSeriesData,
RGBAImage,
RGBImage,
)
@@ -112,14 +117,23 @@ from ...core.v2_7_0.core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
+ AnnotationSeriesData,
DecompositionSeries,
DecompositionSeriesBands,
DecompositionSeriesData,
IntervalSeries,
+ IntervalSeriesData,
Units,
UnitsSpikeTimes,
+ UnitsWaveformMean,
+ UnitsWaveformSd,
+ UnitsWaveforms,
+)
+from ...core.v2_7_0.core_nwb_ogen import (
+ OptogeneticSeries,
+ OptogeneticSeriesData,
+ OptogeneticStimulusSite,
)
-from ...core.v2_7_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from ...core.v2_7_0.core_nwb_ophys import (
CorrectedImageStack,
DfOverF,
@@ -136,6 +150,7 @@ from ...core.v2_7_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
RoiResponseSeries,
+ RoiResponseSeriesData,
TwoPhotonSeries,
)
from ...core.v2_7_0.core_nwb_retinotopy import (
@@ -178,7 +193,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -188,7 +203,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -199,7 +214,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -212,6 +227,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -222,12 +249,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
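`cast_with_value` complements that at the field level: when a raw value fails validation for a field whose target type wraps its payload in `value`, the validator retries with `{"value": v}` before giving up. A self-contained sketch under the same assumption (class names here are illustrative):

    from typing import Any, List, Optional

    from pydantic import BaseModel, field_validator

    class Data(BaseModel):
        value: Optional[List[int]] = None

    class Series(BaseModel):
        data: Data

        @field_validator("*", mode="wrap")
        @classmethod
        def cast_with_value(cls, v: Any, handler, info) -> Any:
            # try the value as-is first, then retry wrapped as {"value": v};
            # re-raise the original error if both attempts fail
            try:
                return handler(v)
            except Exception as e1:
                try:
                    return handler({"value": v})
                except Exception:
                    raise e1

    # a bare list is rescued into Data(value=[1, 2, 3])
    s = Series(data=[1, 2, 3])
    assert s.data.value == [1, 2, 3]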
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
index 56af1b8..c6e3962 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -64,12 +76,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
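The `coerce_subclass` tweak repeated in these files guards the upcast against the new config: under `extra="forbid"`, `__pydantic_extra__` is `None` rather than an empty dict, so unconditionally splatting it raised `TypeError`. A small demonstration of the failure mode the new branch avoids (class names are illustrative):

    from pydantic import BaseModel, ConfigDict

    class Base(BaseModel):
        model_config = ConfigDict(extra="forbid")
        name: str

    class Child(Base):
        pass

    v = Base(name="x")
    # with extra="forbid", pydantic stores None here, not {}
    assert v.__pydantic_extra__ is None

    # old path: Child(**{**v.__dict__, **v.__pydantic_extra__})  -> TypeError
    # patched path upcasts from __dict__ alone when there are no extras:
    child = Child(**v.__dict__)
    assert isinstance(child, Child)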
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
index e52b294..4b7bebf 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -88,12 +100,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -117,7 +154,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -168,7 +205,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -260,7 +297,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -316,7 +353,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -571,13 +608,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -634,24 +677,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -887,7 +933,7 @@ class Index(Data):
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -900,7 +946,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
description: str = Field(..., description="""Description of what these vectors represent.""")
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -915,7 +961,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -933,7 +979,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -963,7 +1009,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py
index dcc5707..37df218 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_0.hdmf_common_sparse import (
CSRMatrix,
@@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +92,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
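Likewise, swapping the `_UnionGenericAlias` name check for a `while hasattr(annotation, "__args__")` loop lets the column-casting fallback peel nested parameterizations (e.g. `Optional[VectorData[NDArray]]`) down to a concrete class instead of stopping after one level. In isolation:

    from typing import List, Optional, Union

    def unwrap(annotation):
        # keep taking the first type argument until a bare class remains,
        # mirroring the generated fallback for column annotations
        while hasattr(annotation, "__args__"):
            annotation = annotation.__args__[0]
        return annotation

    assert unwrap(Optional[List[int]]) is int  # Optional -> List -> int
    assert unwrap(Union[str, int]) is str      # first union member
    assert unwrap(int) is int                  # already a plain class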
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
index 8ce7f43..3ad3d86 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -64,12 +76,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
index 9065b81..df885a6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -88,12 +100,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -117,7 +154,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -168,7 +205,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -260,7 +297,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -316,7 +353,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -571,13 +608,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -634,24 +677,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -887,7 +933,7 @@ class Index(Data):
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -900,7 +946,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
description: str = Field(..., description="""Description of what these vectors represent.""")
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -915,7 +961,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -933,7 +979,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -963,7 +1009,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py
index 0f66985..c65b595 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_2.hdmf_common_sparse import (
CSRMatrix,
@@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +92,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
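Widening `__getitem__` to `Union[int, slice, str]` matches the rest of the change set: once extras are packed into a `value` dict, string keys become a legitimate way to index a model. Under that assumption (a minimal stand-in class):

    from typing import Any, Dict, Optional, Union

    from pydantic import BaseModel

    class Indexed(BaseModel):
        value: Optional[Dict[str, Any]] = None

        def __getitem__(self, val: Union[int, slice, str]) -> Any:
            # delegate to ``value`` when present, as the generated models do
            if self.value is not None:
                return self.value[val]
            raise KeyError(val)

    m = Indexed(value={"series_a": [1, 2, 3]})
    assert m["series_a"] == [1, 2, 3]  # str keys now type-check and resolve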
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
index c0f7fcc..51a407e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -64,12 +76,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
index 749fab9..454e1eb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
@@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -88,12 +100,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -117,7 +154,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -168,7 +205,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -260,7 +297,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -316,7 +353,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -571,13 +608,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -634,24 +677,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -887,7 +933,7 @@ class Index(Data):
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -901,7 +947,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -919,7 +965,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -937,7 +983,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -975,7 +1021,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py
index c505d77..5c85466 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_sparse import (
CSRMatrix,
@@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -80,12 +92,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
index 656629d..14abdac 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
index 13824fe..3e588f1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -64,12 +76,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
index fdd6bcc..e7458c5 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
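+    # the mixins now inherit ConfiguredBaseModel directly, picking up the shared
+    # config and validators, so the concrete classes below can drop it as an
+    # explicit base (e.g. class VectorData(VectorDataMixin))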
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
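+                    # "ValueError" is not a valid pydantic error type; use
+                    # "value_error" and carry the message in ctx["error"],
+                    # appending the original line errors so they still surface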
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
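+            # walk arbitrarily nested generics (Optional[...], Union[...], etc.)
+            # down to the first concrete type instead of sniffing the private
+            # _UnionGenericAlias class name, which only handled one level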
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
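+            # description may be generated as None, which the column types
+            # reject, so fall back to an empty string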
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
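+    # ConfiguredBaseModel is already in VectorDataMixin's MRO (see above), so it
+    # no longer needs to be listed as an explicit base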
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -970,7 +1016,7 @@ class VocabData(VectorData):
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py
index 25e5651..f8b7421 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_2_0.hdmf_common_base import Container, Data
from ...hdmf_common.v1_2_0.hdmf_common_sparse import (
@@ -35,7 +35,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -69,6 +69,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -79,12 +91,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
index affaa59..1900b5a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
index 01484f3..e76ebb6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_2_1.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
index cc9029d..129bd39 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -970,7 +1016,7 @@ class VocabData(VectorData):
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py
index 1338679..9c30289 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_2_1.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_2_1.hdmf_common_sparse import (
@@ -35,7 +35,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -69,6 +69,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -79,12 +91,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
index a7ed66d..73dfb25 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
index 4d4850c..3405369 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
index 2620eb6..718593e 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_3_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
index a55c212..1fe61d6 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -970,7 +1016,7 @@ class VocabData(VectorData):
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py
index 040adf7..7988815 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_3_0.hdmf_common_resources import (
@@ -37,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -71,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -81,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
index 02d67bf..11ae3a5 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py
index ad70998..018a821 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_4_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
index a730ec1..43b0dbb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
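Rebasing the mixins from `BaseModel` onto `ConfiguredBaseModel` gives every mixin the shared config and rescue validators, which is what lets the concrete classes further down drop `ConfiguredBaseModel` from their own bases (e.g. `class VectorData(VectorDataMixin)`) with an unchanged effective MRO. Schematically:

    class Base:  # stand-in for ConfiguredBaseModel
        pass

    class Mixin(Base):  # stand-in for VectorDataMixin, which used to subclass BaseModel
        pass

    class Concrete(Mixin):  # formerly class Concrete(Mixin, Base)
        pass

    assert Concrete.__mro__ == (Concrete, Mixin, Base, object)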
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
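The rebuilt error now matches what pydantic-core actually expects from `ValidationError.from_exception_data`: `type` must be a known error kind (`value_error`, not the exception class name `ValueError`), the human-readable message rides along in `ctx["error"]`, and the original line errors are spliced in via `*e.errors()` so the underlying cause is preserved. The general shape, as a standalone sketch with placeholder input:

    from pydantic import ValidationError

    err = ValidationError.from_exception_data(
        title="cast_extra_columns",
        line_errors=[
            {
                "type": "value_error",
                "loc": ("DynamicTableMixin", "cast_extra_columns"),
                "input": [1, 2, 3],
                "ctx": {"error": ValueError("field x cannot be cast from [1, 2, 3]")},
            }
        ],
    )
    print(err)  # 1 validation error for cast_extra_columns ...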
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
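Two smaller fixes ride along in the column-casting path above. The annotation unwrap becomes a loop over `__args__`, so arbitrarily nested generic aliases (e.g. `Optional[List[...]]`) peel down to the concrete class instead of only handling the single `_UnionGenericAlias` case; and a `None` field description is coerced to `""`, since `VectorData.description` is a required `str`. The unwrap in isolation:

    from typing import List, Optional

    def unwrap(annotation):
        # peel generic aliases until a concrete class remains
        while hasattr(annotation, "__args__"):
            annotation = annotation.__args__[0]
        return annotation

    assert unwrap(Optional[List[int]]) is int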
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py
index 0a85a76..fb4e7ae 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -73,12 +85,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
index 7c62f93..6226eb1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
index d434cd9..7b50f61 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
index 27a287c..37d0124 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -975,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
+ name: str = Field(...)
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
+ )
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py
index 6e04fd0..8d5c266 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -74,12 +86,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py
index df22948..6610cdc 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py
index 4e921cc..b3d639b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_1.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py
index 3112a4f..e318d77 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -975,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
+ name: str = Field(...)
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
+ )
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py
index fa4ea72..f9021e5 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -74,12 +86,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
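With `extra="forbid"` replacing `extra="allow"`, the new `gather_extra_to_value` model validator is what keeps kwarg-style construction working: unknown keys are swept into the `value` slot instead of raising. A minimal standalone sketch of that behavior (a stand-in model, not one of the generated classes):

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class Multi(BaseModel):
    model_config = ConfigDict(extra="forbid")

    name: str
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # same logic as the generated validator: move unknown keys into
        # ``value`` before extra="forbid" gets a chance to reject them
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" in v:
                    v["value"].update(extras)
                else:
                    v["value"] = extras
        return v


m = Multi(name="container", child_a=1, child_b=2)
assert m.value == {"child_a": 1, "child_b": 2}
```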
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py
index 57c9079..86feeaa 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
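The widened `__getitem__(self, val: Union[int, slice, str])` signature matters for containers like `SimpleMultiContainer` above, whose `value` slot is a `Dict[str, Container]`: string keys now type-check alongside integer and slice indices. A toy sketch (hypothetical `Keyed` class, not generated code):

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel


class Keyed(BaseModel):
    value: Optional[Dict[str, Any]] = None

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        # dict-backed containers are indexed by child name
        if self.value is not None:
            return self.value[val]
        raise KeyError(f"Could not get item {val}")


k = Keyed(value={"trials": [1, 2, 3]})
assert k["trials"] == [1, 2, 3]
```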
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py
index 73a6043..23b5fe0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_6_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
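The new `cast_with_value` wrap validator is the field-level counterpart to `gather_extra_to_value`: when a raw value fails validation against a model-typed field, it retries with the input wrapped as `{"value": v}`. A self-contained sketch with stand-in models:

```python
from typing import Any, Optional

from pydantic import BaseModel, field_validator


class Wrapped(BaseModel):
    value: Optional[list] = None


class Outer(BaseModel):
    data: Wrapped

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # same rescue pattern as the generated models
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1


o = Outer(data=[1, 2, 3])  # a bare list fails as Wrapped...
assert o.data.value == [1, 2, 3]  # ...but is rescued as Wrapped(value=[1, 2, 3])
```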
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py
index 0759b51..bccef50 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -975,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
+ name: str = Field(...)
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
+ )
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py
index 981e600..a83f1a7 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -74,12 +86,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
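The `coerce_subclass` guard exists because `__pydantic_extra__` is only a dict under `extra="allow"`; with the models now on `extra="forbid"` it is `None`, and the old unconditional `**v.__pydantic_extra__` splat would raise `TypeError`. Demonstrated with stand-in models:

```python
from pydantic import BaseModel, ConfigDict


class Parent(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


class Child(Parent):
    pass


p = Parent(name="x")
assert p.__pydantic_extra__ is None  # forbid => no extras dict

# old: Child(**{**p.__dict__, **p.__pydantic_extra__})  -> TypeError (** over None)
# new: only splat the extras when they exist
kwargs = {**p.__dict__, **p.__pydantic_extra__} if p.__pydantic_extra__ else dict(p.__dict__)
assert isinstance(Child(**kwargs), Child)
```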
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py
index e785e04..54ccd77 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py
index b2fe190..01afd96 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_7_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
index e805fe7..2ee12e7 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -975,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
+ name: str = Field(...)
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
+ )
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py
index 4aaa46d..ac47d34 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -74,12 +86,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
index 7731368..2d2b0b9 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -63,12 +75,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -134,10 +171,10 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
+ name: str = Field(...)
value: Optional[Dict[str, Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
- name: str = Field(...)
# Model rebuild
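Moving the mixins onto `ConfiguredBaseModel` (rather than bare `BaseModel`) is what lets the concrete classes later in these diffs drop `ConfiguredBaseModel` from their base lists: the config and validators now arrive through the mixin, keeping the MRO linear. A toy sketch of the inheritance shape (hypothetical class names):

```python
from pydantic import BaseModel, ConfigDict


class ConfiguredBase(BaseModel):
    model_config = ConfigDict(extra="forbid", validate_assignment=True)


class VectorMixin(ConfiguredBase):  # was: class VectorMixin(BaseModel)
    def is_empty(self) -> bool:
        return not getattr(self, "value", None)


class Vector(VectorMixin):  # was: class Vector(VectorMixin, ConfiguredBase)
    value: list = []


assert [c.__name__ for c in Vector.__mro__[:3]] == ["Vector", "VectorMixin", "ConfiguredBase"]
assert Vector.model_config["extra"] == "forbid"  # config inherited via the mixin
```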
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
index 7a3e72c..1aad5a2 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_8_0.hdmf_common_base import Container
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
index 8f0d610..b779c48 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
@@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -90,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -119,7 +156,7 @@ NUMPYDANTIC_VERSION = "1.2.1"
T = TypeVar("T", bound=NDArray)
-class VectorDataMixin(BaseModel, Generic[T]):
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@@ -170,7 +207,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
-class VectorIndexMixin(BaseModel, Generic[T]):
+class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@@ -262,7 +299,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
-class DynamicTableRegionMixin(BaseModel):
+class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@@ -318,7 +355,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
-class DynamicTableMixin(BaseModel):
+class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@@ -573,13 +610,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
- title=f"field {key} cannot be cast to VectorData from {val}",
+ title="cast_extra_columns",
line_errors=[
{
- "type": "ValueError",
- "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "type": "value_error",
"input": val,
- }
+ "loc": ("DynamicTableMixin", "cast_extra_columns"),
+ "ctx": {
+ "error": ValueError(
+ f"field {key} cannot be cast to {to_cast} from {val}"
+ )
+ },
+ },
+ *e.errors(),
],
) from e
return model
@@ -636,24 +679,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
- if type(annotation).__name__ == "_UnionGenericAlias":
+ while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
+ description = cls.model_fields[info.field_name].description
+ description = description if description is not None else ""
+
return handler(
annotation(
val,
name=info.field_name,
- description=cls.model_fields[info.field_name].description,
+ description=description,
)
)
except Exception:
raise e from None
-class AlignedDynamicTableMixin(BaseModel):
+class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@@ -862,7 +908,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(VectorDataMixin, ConfiguredBaseModel):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -876,7 +922,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel):
value: Optional[T] = Field(None)
-class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -900,7 +946,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel):
] = Field(None)
-class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
+class ElementIdentifiers(ElementIdentifiersMixin, Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
@@ -918,7 +964,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel):
)
-class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -928,12 +974,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
)
name: str = Field(...)
- table: DynamicTable = Field(
- ..., description="""Reference to the DynamicTable object that this region applies to."""
- )
description: str = Field(
..., description="""Description of what this table region points to."""
)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
@@ -944,7 +990,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode
] = Field(None)
-class DynamicTable(DynamicTableMixin, ConfiguredBaseModel):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -975,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
+ name: str = Field(...)
+ categories: List[str] = Field(
+ ...,
+ description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""",
+ )
value: Optional[Dict[str, DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py
index dd09b7f..64c8e43 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix
@@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -74,12 +86,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
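
With `extra="forbid"`, unknown kwargs would normally be rejected outright; the new `gather_extra_to_value` validator reroutes them into the `value` slot instead. A standalone sketch of the pattern, assuming pydantic v2 (`Scratch` is a hypothetical model, not one of the generated classes):

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator

class Scratch(BaseModel):  # hypothetical stand-in for a generated class
    model_config = ConfigDict(extra="forbid")
    name: str
    value: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        # same shape as the generated validator: move unknown keys into `value`
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" in v:
                    v["value"].update(extras)
                else:
                    v["value"] = extras
        return v

m = Scratch(name="a", anything="goes")
print(m.value)  # {'anything': 'goes'} -- would have raised under plain extra="forbid"
```

Note the generated validator assumes an existing `value` entry is a dict (`v["value"].update(extras)`); inputs that pass a non-dict `value` alongside extra keys will still fail.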
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
index ad617da..0eab9b0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
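
`cast_with_value` is the complement of `coerce_value`: where the latter unwraps an input's `value`, this one wraps a bare input *into* `{"value": v}` and retries. A runnable sketch with hypothetical models (`Inner`/`Outer` are illustrations, not generated classes):

```python
from typing import Any, Optional

from pydantic import BaseModel, field_validator

class Inner(BaseModel):  # hypothetical: a model whose payload lives in `value`
    value: Optional[int] = None

class Outer(BaseModel):
    data: Inner

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # first try the field as given; on failure, retry wrapped as {"value": v}
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1

print(Outer(data=5).data)  # value=5 -- the bare int was cast into Inner.value
```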
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
index cda720e..a4e00af 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
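
The `coerce_subclass` guard is needed because pydantic v2 sets `__pydantic_extra__` to `None` (not `{}`) unless `extra="allow"` is configured, so the old dict-splat raises once these models switch to `extra="forbid"`. A sketch (`Forbidden` is a hypothetical minimal model):

```python
from pydantic import BaseModel, ConfigDict

class Forbidden(BaseModel):  # hypothetical minimal model
    model_config = ConfigDict(extra="forbid")
    x: int = 0

v = Forbidden()
print(v.__pydantic_extra__)  # None under extra="forbid", not {}

# old pattern: raises because None is not a mapping
try:
    merged = {**v.__dict__, **v.__pydantic_extra__}
except TypeError as e:
    print("old pattern fails:", e)

# guarded pattern from the diff
if v.__pydantic_extra__:
    merged = {**v.__dict__, **v.__pydantic_extra__}
else:
    merged = {**v.__dict__}
print(merged)  # {'x': 0}
```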
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
index 3429a1e..5e1eaba 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix
@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -48,7 +48,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -82,12 +94,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
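
Widening `__getitem__` to accept `str` makes dict-valued `value` containers (e.g. the `Dict[str, DynamicTable]` slots) indexable by key without a type error. A hypothetical sketch of the same method body:

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel

class Holder(BaseModel):  # hypothetical stand-in, not a generated class
    value: Optional[Dict[str, Any]] = None

    def __getitem__(self, val: Union[int, slice, str]) -> Any:
        """Try to get a value from value or "data" if we have it"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        raise KeyError(f"No value field to index from: {val}")

h = Holder(value={"unit": "seconds"})
print(h["unit"])  # str keys now type-check against the widened signature
```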
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py
index 1a88edc..2acca00 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
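
`coerce_value` itself only gains the `info` parameter in these hunks, but for reference, a sketch of the rescue pattern it implements; the retry branch's body sits outside the hunk, so the `v["value"]` lookup below is an assumption consistent with the caught exception types:

```python
from typing import Any

from pydantic import BaseModel, field_validator

class Target(BaseModel):  # hypothetical model, not a generated class
    n: int

    @field_validator("*", mode="wrap")
    @classmethod
    def coerce_value(cls, v: Any, handler, info) -> Any:
        # try the input as-is; on failure, retry with its "value" entry
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler(v["value"])
            except (IndexError, KeyError, TypeError):
                raise e1

print(Target(n={"value": 3}).n)  # 3 -- unwrapped from the "value" entry
```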
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
index 8d5af36..fd7d447 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
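
The `cast_extra_columns` rewrite near the top of this diff builds the error through `ValidationError.from_exception_data` with a proper `value_error` line error (plus the original `e.errors()`) instead of stuffing the message into the title. A standalone sketch of that pydantic-core API, with illustrative values:

```python
from pydantic import ValidationError

# "value_error" pulls its message from ctx["error"]
err = ValidationError.from_exception_data(
    title="cast_extra_columns",
    line_errors=[
        {
            "type": "value_error",
            "loc": ("DynamicTableMixin", "cast_extra_columns"),
            "input": [1, 2, 3],
            "ctx": {"error": ValueError("field foo cannot be cast from [1, 2, 3]")},
        }
    ],
)
print(err.errors()[0]["msg"])  # Value error, field foo cannot be cast from [1, 2, 3]
```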
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
index c697f83..de71e60 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -73,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -83,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
index cbd0ad9..425f2b4 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py
index 9f337fa..0a72ec0 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py
index e1a12ca..4e09fe1 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix
@@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -73,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -83,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py
index 0551cfd..c36225c 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py
index 09e6f05..e21325f 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py
index c904202..e82d0dc 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
index 714ae52..6aaf19b 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
index d3132cd..8acfc1a 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
@@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data
@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
index 281e5b2..46184eb 100644
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
@@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data, SimpleMultiContainer
from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix
@@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
- extra="allow",
+ extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
- def __getitem__(self, val: Union[int, slice]) -> Any:
+ def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
- def coerce_value(cls, v: Any, handler) -> Any:
+ def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_with_value(cls, v: Any, handler, info) -> Any:
+ """Try to rescue instantiation by casting into the model's value field"""
+ try:
+ return handler(v)
+ except Exception as e1:
+ try:
+ return handler({"value": v})
+ except Exception:
+ raise e1
+
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
- v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ if v.__pydantic_extra__:
+ v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
+ else:
+ v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
+ @model_validator(mode="before")
+ @classmethod
+ def gather_extra_to_value(cls, v: Any) -> Any:
+ """
+ For classes that don't allow extra fields and have a value slot,
+ pack those extra kwargs into ``value``
+ """
+ if (
+ cls.model_config["extra"] == "forbid"
+ and "value" in cls.model_fields
+ and isinstance(v, dict)
+ ):
+ extras = {key: val for key, val in v.items() if key not in cls.model_fields}
+ if extras:
+ for k in extras:
+ del v[k]
+ if "value" in v:
+ v["value"].update(extras)
+ else:
+ v["value"] = extras
+ return v
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
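
The new `cast_with_value` wrap validator retries a failed field validation by re-wrapping the raw input as `{"value": v}`, so a bare scalar or array can still populate one of the compound `*__data` classes introduced below. A sketch under assumed class shapes:

from typing import Any, Optional

from pydantic import BaseModel, field_validator


class Data(BaseModel):
    """Hypothetical compound data class with a `value` slot."""

    value: Optional[float] = None
    unit: str = "volts"


class Series(BaseModel):
    data: Data

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        # First try the input as-is; on failure, retry it as the
        # `value` member of the compound model.
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1


s = Series(data=1.5)  # bare scalar is rescued into Data(value=1.5)
assert s.data.value == 1.5
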
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml
index b11b02c..1771272 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml
@@ -124,7 +124,6 @@ classes:
external file.
range: TimeSeries__data
required: true
- multivalued: false
inlined: true
starting_time:
name: starting_time
@@ -132,8 +131,6 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
range: TimeSeries__starting_time
- required: false
- multivalued: false
inlined: true
timestamps:
name: timestamps
@@ -176,8 +173,6 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
range: TimeSeries__sync
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
tree_root: true
@@ -309,13 +304,24 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: NWBDataInterface
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of this collection of processed data.
+ range: text
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: NWBDataInterface
+ - range: DynamicTable
tree_root: true
Images:
name: Images
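
This is the schema-side half of the same change: instead of a single anonymous `value` list entry, each container class now spells out `name`, `description` (where required), and `value`, so the generated pydantic classes expose real fields. A rough sketch of the regenerated `ProcessingModule` shape (field types abbreviated; not the exact generated code):

from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict


class ProcessingModule(BaseModel):
    """Rough shape of the regenerated class, not the exact output."""

    model_config = ConfigDict(extra="forbid")

    name: str
    description: str
    value: Optional[Dict[str, Any]] = None  # NWBDataInterface | DynamicTable children

    def __getitem__(self, key: str) -> Any:
        # Children are addressed by name through `value`.
        return self.value[key]


mod = ProcessingModule(
    name="behavior",
    description="A collection of processed data.",
    value={"Position": "stand-in for a Position container"},
)
assert mod["Position"]
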
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
index f63c218..4b3440f 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
@@ -38,14 +38,11 @@ classes:
reference frame.
range: SpatialSeries__data
required: true
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
range: text
- required: false
- multivalued: false
tree_root: true
SpatialSeries__data:
name: SpatialSeries__data
@@ -59,6 +56,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant

+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
@@ -94,12 +127,19 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: IntervalSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEpochs)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: IntervalSeries
tree_root: true
BehavioralEvents:
name: BehavioralEvents
@@ -107,12 +147,19 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEvents)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
BehavioralTimeSeries:
name: BehavioralTimeSeries
@@ -120,36 +167,57 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralTimeSeries)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(PupilTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(EyeTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
CompassDirection:
name: CompassDirection
@@ -160,22 +228,36 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(CompassDirection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(Position)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
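
Each behavioral container also gains an `ifabsent: string(<ClassName>)` default for `name`, which in the generated models becomes a plain field default. A one-field sketch (the class body is assumed, not the generated code):

from typing import Any, Dict, Optional

from pydantic import BaseModel, Field


class PupilTracking(BaseModel):
    """Sketch only: `ifabsent: string(PupilTracking)` -> field default."""

    name: str = Field("PupilTracking")
    value: Optional[Dict[str, Any]] = None


assert PupilTracking().name == "PupilTracking"
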
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
index 7a93461..9b4593c 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
@@ -39,40 +39,6 @@ classes:
about the filter properties as possible.
range: text
required: false
- data:
- name: data
- description: Recorded voltage data.
- range: numeric
- required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - alias: num_samples
- electrodes:
- name: electrodes
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: DynamicTableRegion pointer to the electrodes that this time series
- was generated from.
- range: DynamicTableRegion
- required: true
- multivalued: false
- inlined: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
@@ -90,7 +56,100 @@ classes:
range: float32
required: false
multivalued: false
+ data:
+ name: data
+ description: Recorded voltage data.
+ range: ElectricalSeries__data
+ required: true
+ inlined: true
+ electrodes:
+ name: electrodes
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: DynamicTableRegion pointer to the electrodes that this time series
+ was generated from.
+ range: DynamicTableRegion
+ required: true
+ inlined: true
tree_root: true
+ ElectricalSeries__data:
+ name: ElectricalSeries__data
+ description: Recorded voltage data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This value
+ is fixed to 'volts'. Actual stored values are not necessarily stored in
+ these units. To access the data in these units, multiply 'data' by 'conversion'
+ and 'channel_conversion' (if present).
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - alias: num_samples
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@@ -111,19 +170,9 @@ classes:
data:
name: data
description: Spike waveforms.
- range: numeric
+ range: SpikeEventSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_events
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_events
- - alias: num_channels
- - alias: num_samples
+ inlined: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
@@ -137,6 +186,73 @@ classes:
required: true
multivalued: false
tree_root: true
+ SpikeEventSeries__data:
+ name: SpikeEventSeries__data
+ description: Spike waveforms.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for waveforms, which is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_samples
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_channels
+ - alias: num_samples
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
@@ -192,7 +308,6 @@ classes:
was generated from.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
EventDetection:
@@ -212,7 +327,6 @@ classes:
or dV/dT threshold, as well as relevant values.
range: text
required: true
- multivalued: false
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
@@ -241,7 +355,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ElectricalSeries
@@ -254,12 +367,19 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpikeEventSeries
+ name:
+ name: name
+ ifabsent: string(EventWaveform)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpikeEventSeries
tree_root: true
FilteredEphys:
name: FilteredEphys
@@ -276,12 +396,19 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(FilteredEphys)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
LFP:
name: LFP
@@ -290,12 +417,19 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(LFP)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
ElectrodeGroup:
name: ElectrodeGroup
@@ -323,8 +457,6 @@ classes:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
- required: false
- multivalued: false
inlined: true
device:
name: device
@@ -333,7 +465,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -356,24 +487,18 @@ classes:
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
y:
name: y
description: y coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
z:
name: z
description: z coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@@ -395,7 +520,6 @@ classes:
description: Filtering applied to data before generating mean/sd
range: text
required: true
- multivalued: false
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
@@ -427,7 +551,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Clustering
@@ -451,7 +574,6 @@ classes:
clusters curated using Klusters, etc)
range: text
required: true
- multivalued: false
num:
name: num
description: Cluster number of each event
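
The pattern for `ElectricalSeries.data` and `SpikeEventSeries.data` is the one used throughout this diff: the dataset becomes a compound `*__data` class whose array payload lives in `value`, while `unit`, `conversion`, `resolution`, and `continuity` ride along as attributes. A sketch with the field set assumed from the YAML above:

from typing import Optional

import numpy as np
from pydantic import BaseModel, ConfigDict


class ElectricalSeriesData(BaseModel):
    """Sketch of ElectricalSeries__data; not the generated class."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    unit: str = "volts"
    conversion: float = 1.0
    resolution: float = -1.0
    continuity: Optional[str] = None
    value: Optional[np.ndarray] = None


raw = np.zeros((100, 32), dtype=np.int16)  # num_times x num_channels
data = ElectricalSeriesData(conversion=9.5367e-9, value=raw)
volts = data.value * data.conversion  # apply 'conversion' to get values in 'unit'
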
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
index 4eb778d..c095b7b 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
@@ -63,15 +63,11 @@ classes:
value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
timeseries:
name: timeseries
description: An index into a TimeSeries object.
range: TimeIntervals__timeseries
- required: false
- multivalued: false
inlined: true
timeseries_index:
name: timeseries_index
@@ -84,8 +80,6 @@ classes:
value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
TimeIntervals__timeseries:
@@ -108,8 +102,6 @@ classes:
array:
exact_number_dimensions: 1
range: int32
- required: false
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -117,14 +109,10 @@ classes:
array:
exact_number_dimensions: 1
range: int32
- required: false
- multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
array:
exact_number_dimensions: 1
range: TimeSeries
- required: false
- multivalued: false
inlined: true
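
Most deletions in these YAML hunks are `required: false` / `multivalued: false` pairs; both are unset-by-default in the LinkML metamodel, so dropping them does not change the generated models. A quick check with linkml-runtime (the metamodel import path as published by linkml-runtime):

from linkml_runtime.linkml_model.meta import SlotDefinition

slot = SlotDefinition(name="timeseries")
# Unset `required`/`multivalued` are falsy, same as the removed lines.
assert not slot.required
assert not slot.multivalued
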
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml
index a3eb463..e998eab 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml
@@ -81,13 +81,11 @@ classes:
other files.
range: text
required: true
- multivalued: false
session_description:
name: session_description
description: A description of the experimental session and data in the file.
range: text
required: true
- multivalued: false
session_start_time:
name: session_start_time
description: 'Date and time of the experiment/session start. The date is stored
@@ -96,7 +94,6 @@ classes:
offset. Date accuracy is up to milliseconds.'
range: isodatetime
required: true
- multivalued: false
timestamps_reference_time:
name: timestamps_reference_time
description: 'Date and time corresponding to time zero of all timestamps.
@@ -106,7 +103,6 @@ classes:
times stored in the file use this time as reference (i.e., time zero).'
range: isodatetime
required: true
- multivalued: false
acquisition:
name: acquisition
description: Data streams recorded from the system, including ephys, ophys,
@@ -185,7 +181,6 @@ classes:
can exist in the present file or can be linked to a remote library file.
range: NWBFile__stimulus
required: true
- multivalued: false
inlined: true
inlined_as_list: true
general:
@@ -207,7 +202,6 @@ classes:
should not be created unless there is data to store within them.
range: NWBFile__general
required: true
- multivalued: false
inlined: true
inlined_as_list: true
intervals:
@@ -217,18 +211,18 @@ classes:
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
range: NWBFile__intervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
units:
name: units
description: Data about sorted spike units.
range: Units
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
+ specifications:
+ name: specifications
+ description: Nested dictionary of schema specifications
+ range: dict
tree_root: true
NWBFile__stimulus:
name: NWBFile__stimulus
@@ -299,14 +293,10 @@ classes:
name: data_collection
description: Notes about data collection and analysis.
range: text
- required: false
- multivalued: false
experiment_description:
name: experiment_description
description: General description of the experiment.
range: text
- required: false
- multivalued: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
@@ -321,8 +311,6 @@ classes:
name: institution
description: Institution(s) where experiment was performed.
range: text
- required: false
- multivalued: false
keywords:
name: keywords
description: Terms to search over.
@@ -336,28 +324,20 @@ classes:
name: lab
description: Laboratory where experiment was performed.
range: text
- required: false
- multivalued: false
notes:
name: notes
description: Notes about the experiment.
range: text
- required: false
- multivalued: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
range: text
- required: false
- multivalued: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
range: text
- required: false
- multivalued: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
@@ -371,52 +351,31 @@ classes:
name: session_id
description: Lab-specific ID for the session.
range: text
- required: false
- multivalued: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
range: text
- required: false
- multivalued: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
range: general__source_script
- required: false
- multivalued: false
inlined: true
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
range: text
- required: false
- multivalued: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
range: text
- required: false
- multivalued: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
range: text
- required: false
- multivalued: false
- lab_meta_data:
- name: lab_meta_data
- description: Place-holder than can be extended so that lab-specific meta-data
- can be placed in /general.
- range: LabMetaData
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
@@ -431,24 +390,18 @@ classes:
description: Information about the animal or person from which the data was
measured.
range: Subject
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
range: general__extracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
range: general__intracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
optogenetics:
@@ -467,6 +420,14 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
+ value:
+ name: value
+ description: Place-holder that can be extended so that lab-specific meta-data
+ can be placed in /general.
+ range: LabMetaData
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
general__source_script:
name: general__source_script
description: Script file or link to public source code used to create this NWB
@@ -499,22 +460,19 @@ classes:
range: string
required: true
equals_string: extracellular_ephys
- electrode_group:
- name: electrode_group
- description: Physical group of electrodes.
- range: ElectrodeGroup
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
range: extracellular_ephys__electrodes
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
+ value:
+ name: value
+ description: Physical group of electrodes.
+ range: ElectrodeGroup
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
extracellular_ephys__electrodes:
name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
@@ -656,22 +614,17 @@ classes:
frequency fall-off, etc. If this changes between TimeSeries, filter description
should be stored as a text attribute for each TimeSeries.
range: text
- required: false
- multivalued: false
- intracellular_electrode:
- name: intracellular_electrode
- description: An intracellular electrode.
- range: IntracellularElectrode
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
sweep_table:
name: sweep_table
description: The table which groups different PatchClampSeries together.
range: SweepTable
- required: false
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ value:
+ name: value
+ description: An intracellular electrode.
+ range: IntracellularElectrode
+ multivalued: true
inlined: true
inlined_as_list: false
NWBFile__intervals:
@@ -692,32 +645,25 @@ classes:
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
- time_intervals:
- name: time_intervals
+ value:
+ name: value
description: Optional additional table(s) for describing other experimental
time intervals.
range: TimeIntervals
- required: false
multivalued: true
inlined: true
inlined_as_list: false
@@ -746,56 +692,38 @@ classes:
name: age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
range: text
- required: false
- multivalued: false
date_of_birth:
name: date_of_birth
description: Date of birth of subject. Can be supplied instead of 'age'.
range: isodatetime
- required: false
- multivalued: false
description:
name: description
description: Description of subject and where subject came from (e.g., breeder,
if animal).
range: text
- required: false
- multivalued: false
genotype:
name: genotype
description: Genetic strain. If absent, assume Wild Type (WT).
range: text
- required: false
- multivalued: false
sex:
name: sex
description: Gender of subject.
range: text
- required: false
- multivalued: false
species:
name: species
description: Species of subject.
range: text
- required: false
- multivalued: false
strain:
name: strain
description: Strain of subject.
range: text
- required: false
- multivalued: false
subject_id:
name: subject_id
description: ID of animal/person used/participating in experiment (lab convention).
range: text
- required: false
- multivalued: false
weight:
name: weight
description: Weight at time of experiment, at time of surgery and at other
important times.
range: text
- required: false
- multivalued: false
tree_root: true
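
In `core.nwb.file.yaml`, the free-form multivalued slots (`lab_meta_data`, `electrode_group`, `intracellular_electrode`, `time_intervals`) are all renamed to `value`, giving every open container the same access pattern, and `NWBFile` gains a `specifications` slot with `range: dict` for cached schema sources. A sketch of the resulting access pattern (class shapes assumed):

from typing import Any, Dict, Optional

from pydantic import BaseModel


class ExtracellularEphys(BaseModel):
    """Sketch of general__extracellular_ephys; not the generated class."""

    electrodes: Optional[Any] = None        # the electrodes DynamicTable
    value: Optional[Dict[str, Any]] = None  # named ElectrodeGroup children


ephys = ExtracellularEphys(value={"tetrode1": "ElectrodeGroup stand-in"})
group = ephys.value["tetrode1"]  # uniform child access via `value`
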
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
index 26823be..e37c11d 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
@@ -41,15 +41,12 @@ classes:
description: Recorded voltage or current.
range: PatchClampSeries__data
required: true
- multivalued: false
inlined: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
range: float32
- required: false
- multivalued: false
electrode:
name: electrode
annotations:
@@ -57,7 +54,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: IntracellularElectrode
@@ -74,6 +70,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -99,31 +131,24 @@ classes:
identifier: true
range: string
required: true
+ bias_current:
+ name: bias_current
+ description: Bias current, in amps.
+ range: float32
+ bridge_balance:
+ name: bridge_balance
+ description: Bridge balance, in ohms.
+ range: float32
+ capacitance_compensation:
+ name: capacitance_compensation
+ description: Capacitance compensation, in farads.
+ range: float32
data:
name: data
description: Recorded voltage.
range: CurrentClampSeries__data
required: true
- multivalued: false
inlined: true
- bias_current:
- name: bias_current
- description: Bias current, in amps.
- range: float32
- required: false
- multivalued: false
- bridge_balance:
- name: bridge_balance
- description: Bridge balance, in ohms.
- range: float32
- required: false
- multivalued: false
- capacitance_compensation:
- name: capacitance_compensation
- description: Capacitance compensation, in farads.
- range: float32
- required: false
- multivalued: false
tree_root: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
@@ -136,6 +161,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -147,8 +208,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
@@ -175,19 +238,16 @@ classes:
description: Bias current, in amps, fixed to 0.0.
range: float32
required: true
- multivalued: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
range: float32
required: true
- multivalued: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
range: float32
required: true
- multivalued: false
tree_root: true
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
@@ -204,7 +264,6 @@ classes:
description: Stimulus current applied.
range: CurrentClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
CurrentClampStimulusSeries__data:
@@ -218,6 +277,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -229,8 +324,10 @@ classes:
equals_string: amperes
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@@ -243,87 +340,48 @@ classes:
identifier: true
range: string
required: true
- data:
- name: data
- description: Recorded current.
- range: VoltageClampSeries__data
- required: true
- multivalued: false
- inlined: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
range: VoltageClampSeries__capacitance_fast
- required: false
- multivalued: false
inlined: true
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
range: VoltageClampSeries__capacitance_slow
- required: false
- multivalued: false
+ inlined: true
+ data:
+ name: data
+ description: Recorded current.
+ range: VoltageClampSeries__data
+ required: true
inlined: true
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
range: VoltageClampSeries__resistance_comp_bandwidth
- required: false
- multivalued: false
inlined: true
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
range: VoltageClampSeries__resistance_comp_correction
- required: false
- multivalued: false
inlined: true
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
range: VoltageClampSeries__resistance_comp_prediction
- required: false
- multivalued: false
inlined: true
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
range: VoltageClampSeries__whole_cell_capacitance_comp
- required: false
- multivalued: false
inlined: true
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
range: VoltageClampSeries__whole_cell_series_resistance_comp
- required: false
- multivalued: false
inlined: true
tree_root: true
- VoltageClampSeries__data:
- name: VoltageClampSeries__data
- description: Recorded current.
- attributes:
- name:
- name: name
- ifabsent: string(data)
- identifier: true
- range: string
- required: true
- equals_string: data
- unit:
- name: unit
- description: Base unit of measurement for working with the data. which is
- fixed to 'amperes'. Actual stored values are not necessarily stored in these
- units. To access the data in these units, multiply 'data' by 'conversion'.
- ifabsent: string(amperes)
- range: text
- required: true
- equals_string: amperes
- value:
- name: value
- range: AnyType
- required: true
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
@@ -368,6 +426,68 @@ classes:
name: value
range: float32
required: true
+ VoltageClampSeries__data:
+ name: VoltageClampSeries__data
+ description: Recorded current.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data, which is
+ fixed to 'amperes'. Actual stored values are not necessarily stored in these
+ units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
+ range: text
+ required: true
+ equals_string: amperes
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
@@ -498,7 +618,6 @@ classes:
description: Stimulus voltage applied.
range: VoltageClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
VoltageClampStimulusSeries__data:
@@ -512,6 +631,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -523,8 +678,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@@ -540,45 +697,32 @@ classes:
description: Description of electrode (e.g., whole-cell, sharp, etc.).
range: text
required: true
- multivalued: false
filtering:
name: filtering
description: Electrode specific filtering.
range: text
- required: false
- multivalued: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
range: text
- required: false
- multivalued: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
range: text
- required: false
- multivalued: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
range: text
- required: false
- multivalued: false
seal:
name: seal
description: Information about seal used for recording.
range: text
- required: false
- multivalued: false
slice:
name: slice
description: Information about slice used for recording.
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
@@ -586,7 +730,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -602,15 +745,6 @@ classes:
identifier: true
range: string
required: true
- sweep_number:
- name: sweep_number
- description: Sweep number of the PatchClampSeries in that row.
- array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: uint32
- required: true
- multivalued: false
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
@@ -633,6 +767,14 @@ classes:
description: Index for series.
range: VectorIndex
required: true
- multivalued: false
inlined: true
+ sweep_number:
+ name: sweep_number
+ description: Sweep number of the PatchClampSeries in that row.
+ array:
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: uint32
+ required: true
+ multivalued: false
tree_root: true
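
The icephys `*__data.value` slots also change range here: from a required, untyped `AnyType` to an optional 1-D numeric array over `num_times`. With numpydantic (which nwb-models already depends on), that constraint can be written as a shaped `NDArray`; the class below is a sketch, not the generated model:

from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class VoltageClampSeriesData(BaseModel):
    """Sketch of VoltageClampSeries__data after this change."""

    unit: str = "amperes"
    conversion: float = 1.0
    value: Optional[NDArray[Shape["* num_times"], np.number]] = None


d = VoltageClampSeriesData(value=np.arange(5, dtype=np.float32))
assert d.value.shape == (5,)
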
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml
index adfab1b..28b17e1 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml
@@ -90,21 +90,8 @@ classes:
data:
name: data
description: Binary data representing images across frames.
- range: numeric
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - alias: z
+ range: ImageSeries__data
+ inlined: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
@@ -122,8 +109,6 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
range: ImageSeries__external_file
- required: false
- multivalued: false
inlined: true
format:
name: format
@@ -131,22 +116,88 @@ classes:
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
+ ifabsent: string(raw)
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Device
- range: string
tree_root: true
+ ImageSeries__data:
+ name: ImageSeries__data
+ description: Binary data representing images across frames.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - alias: z
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
@@ -205,7 +256,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -225,12 +275,16 @@ classes:
identifier: true
range: string
required: true
+ data:
+ name: data
+ description: Images presented to subject, either grayscale or RGB
+ range: OpticalSeries__data
+ required: true
+ inlined: true
distance:
name: distance
description: Distance from camera/monitor to target/eye.
range: float32
- required: false
- multivalued: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -246,12 +300,69 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
- data:
- name: data
- description: Images presented to subject, either grayscale or RGB
- range: numeric
+ orientation:
+ name: orientation
+ description: Description of image relative to some reference frame (e.g.,
+ which way is up). Must also specify frame of reference.
+ range: text
+ tree_root: true
+ OpticalSeries__data:
+ name: OpticalSeries__data
+ description: Images presented to subject, either grayscale or RGB
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
required: true
- multivalued: false
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -265,14 +376,6 @@ classes:
- alias: y
- alias: r_g_b
exact_cardinality: 3
- orientation:
- name: orientation
- description: Description of image relative to some reference frame (e.g.,
- which way is up). Must also specify frame of reference.
- range: text
- required: false
- multivalued: false
- tree_root: true
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
@@ -291,12 +394,9 @@ classes:
data:
name: data
description: Index of the frame in the referenced ImageSeries.
- array:
- dimensions:
- - alias: num_times
- range: int32
+ range: IndexSeries__data
required: true
- multivalued: false
+ inlined: true
indexed_timeseries:
name: indexed_timeseries
annotations:
@@ -304,9 +404,68 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
- range: string
tree_root: true
+ IndexSeries__data:
+ name: IndexSeries__data
+ description: Index of the frame in the referenced ImageSeries.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: int32
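`IndexSeries__data` above shows the pattern this diff applies throughout: a dataset's scalar attributes (`conversion`, `resolution`, `unit`, `continuity`) become slots on a dedicated compound class, and the array itself moves into a `value` slot. A minimal, hypothetical sketch of the Pydantic model such a class plausibly generates to — field names mirror the YAML, the `NDArray`/`Shape` typing follows the numpydantic conventions used elsewhere in nwb_models, and the exact generator output may differ:

```python
# Hypothetical sketch, not verbatim generator output.
from typing import Literal, Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class IndexSeriesData(BaseModel):
    name: Literal["data"] = "data"      # equals_string: data + ifabsent
    conversion: Optional[float] = 1.0   # ifabsent: float(1.0)
    resolution: Optional[float] = -1.0  # ifabsent: float(-1.0)
    unit: str                           # required: true
    continuity: Optional[str] = None    # optional free text
    # the dataset payload moves into `value`, one dimension aliased num_times
    value: Optional[NDArray[Shape["* num_times"], np.int32]] = None
```

The `Literal` pin on `name` corresponds to `equals_string: data`, and the `ifabsent` defaults become plain field defaults.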
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml
index e42c742..e36f824 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
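The `repr` overrides added above tell the Pydantic generator what Python annotation to emit for a LinkML type, so slots with range `numeric` can be annotated as a union rather than collapsing to `float`. A small illustration of why that matters under pydantic v2's smart-union validation (class names here are illustrative, not generator output):

```python
# Illustrative contrast: the effect of `repr: float | int` on validation.
from pydantic import BaseModel


class NumericAsFloat(BaseModel):
    value: float            # old behavior: numeric collapses to float


class NumericAsUnion(BaseModel):
    value: float | int      # with repr: float | int


assert NumericAsFloat(value=3).value == 3.0        # int silently coerced
assert type(NumericAsUnion(value=3).value) is int  # exact type preserved
```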
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
index c2323b8..e151936 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
@@ -38,7 +38,6 @@ classes:
description: Values of each feature at each time.
range: AbstractFeatureSeries__data
required: true
- multivalued: false
inlined: true
feature_units:
name: feature_units
@@ -70,6 +69,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Since there can be different units for different features, store
@@ -105,13 +140,70 @@ classes:
data:
name: data
description: Annotations made during an experiment.
+ range: AnnotationSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ AnnotationSeries__data:
+ name: AnnotationSeries__data
+ description: Annotations made during an experiment.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: text
- required: true
- multivalued: false
- tree_root: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
@@ -131,13 +223,70 @@ classes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
+ range: IntervalSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ IntervalSeries__data:
+ name: IntervalSeries__data
+ description: Use values >0 if interval started, <0 if interval ended.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. This series
+ has no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This series
+ has no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: int8
- required: true
- multivalued: false
- tree_root: true
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -153,14 +302,12 @@ classes:
description: Data decomposed into frequency bands.
range: DecompositionSeries__data
required: true
- multivalued: false
inlined: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
range: text
required: true
- multivalued: false
source_channels:
name: source_channels
annotations:
@@ -173,8 +320,6 @@ classes:
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
bands:
name: bands
@@ -182,7 +327,6 @@ classes:
from. There should be one row in this table for each band.
range: DecompositionSeries__bands
required: true
- multivalued: false
inlined: true
inlined_as_list: true
source_timeseries:
@@ -191,8 +335,6 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: TimeSeries
@@ -209,6 +351,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -289,63 +467,13 @@ classes:
identifier: true
range: string
required: true
- spike_times_index:
- name: spike_times_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the spike_times dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- spike_times:
- name: spike_times
- description: Spike times for each unit.
- range: Units__spike_times
- required: false
- multivalued: false
- inlined: true
- obs_intervals_index:
- name: obs_intervals_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the obs_intervals dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- obs_intervals:
- name: obs_intervals
- description: Observation intervals for each unit.
+ electrode_group:
+ name: electrode_group
+ description: Electrode group that each spike unit came from.
array:
- dimensions:
- - alias: num_intervals
- - alias: start_end
- exact_cardinality: 2
- range: float64
- required: false
- multivalued: false
- electrodes_index:
- name: electrodes_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into electrodes.
- range: VectorIndex
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: ElectrodeGroup
required: false
multivalued: false
inlined: true
@@ -360,51 +488,69 @@ classes:
value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
- electrode_group:
- name: electrode_group
- description: Electrode group that each spike unit came from.
+ electrodes_index:
+ name: electrodes_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into electrodes.
+ range: VectorIndex
+ inlined: true
+ obs_intervals:
+ name: obs_intervals
+ description: Observation intervals for each unit.
array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: ElectrodeGroup
+ dimensions:
+ - alias: num_intervals
+ - alias: start_end
+ exact_cardinality: 2
+ range: float64
required: false
multivalued: false
+ obs_intervals_index:
+ name: obs_intervals_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the obs_intervals dataset.
+ range: VectorIndex
+ inlined: true
+ spike_times:
+ name: spike_times
+ description: Spike times for each unit.
+ range: Units__spike_times
+ inlined: true
+ spike_times_index:
+ name: spike_times_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the spike_times dataset.
+ range: VectorIndex
inlined: true
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_mean
+ inlined: true
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_sd
+ inlined: true
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
@@ -430,13 +576,8 @@ classes:
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
- array:
- dimensions:
- - alias: num_waveforms
- - alias: num_samples
- range: numeric
- required: false
- multivalued: false
+ range: Units__waveforms
+ inlined: true
waveforms_index:
name: waveforms_index
annotations:
@@ -449,8 +590,6 @@ classes:
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
waveforms_index_index:
name: waveforms_index_index
@@ -464,8 +603,6 @@ classes:
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
Units__spike_times:
@@ -489,3 +626,97 @@ classes:
for the spike time to be between samples.
range: float64
required: false
+ Units__waveform_mean:
+ name: Units__waveform_mean
+ description: Spike waveform mean for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_mean)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_mean
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveform_sd:
+ name: Units__waveform_sd
+ description: Spike waveform standard deviation for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_sd)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_sd
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveforms:
+ name: Units__waveforms
+ description: Individual waveforms for each spike on each electrode. This is a
+ doubly indexed column. The 'waveforms_index' column indexes which waveforms
+ in this column belong to the same spike event for a given unit, where each waveform
+ was recorded from a different electrode. The 'waveforms_index_index' column
+ indexes the 'waveforms_index' column to indicate which spike events belong to
+ a given unit. For example, if the 'waveforms_index_index' column has values
+ [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
+ to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
+ column correspond to the 3 spike events of the second unit, and the next 1 element
+ of the 'waveforms_index' column corresponds to the 1 spike event of the third
+ unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
+ the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
+ that were recorded from 3 different electrodes for the first spike time of the
+ first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
+ for a graphical representation of this example. When there is only one electrode
+ for each unit (i.e., each spike time is associated with a single waveform),
+ then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
+ the number of spike events. The number of electrodes for each spike event should
+ be the same within a given unit. The 'electrodes' column should be used to indicate
+ which electrodes are associated with each unit, and the order of the waveforms
+ within a given unit x spike event should be in the same order as the electrodes
+ referenced in the 'electrodes' column of this table. The number of samples for
+ each waveform must be the same.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveforms)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveforms
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
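The `Units__waveforms` description above specifies the doubly ragged layout in prose; written out with the description's own numbers ([2, 5, 6] and [3, 6, 8, 10, 12, 13]), the bookkeeping is plain index arithmetic, independent of the generated models:

```python
# Worked example of the doubly ragged indexing described above. Offsets are
# cumulative end indices, so each region starts where the previous ended.
waveforms_index_index = [2, 5, 6]        # per-unit ends into waveforms_index
waveforms_index = [3, 6, 8, 10, 12, 13]  # per-spike-event ends into waveforms


def region(offsets: list[int], i: int) -> range:
    """Rows of the target column covered by entry i of an index column."""
    start = offsets[i - 1] if i > 0 else 0
    return range(start, offsets[i])


# Unit 0 owns spike events 0-1; its first event spans waveforms rows 0-2,
# i.e. three waveforms recorded on three different electrodes.
assert list(region(waveforms_index_index, 0)) == [0, 1]
assert list(region(waveforms_index, 0)) == [0, 1, 2]
# Unit 1 owns the next three spike events (rows 2, 3, 4 of waveforms_index).
assert list(region(waveforms_index_index, 1)) == [2, 3, 4]
```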
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
index 0dc7be0..c4078fa 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
@@ -27,12 +27,9 @@ classes:
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
- array:
- dimensions:
- - alias: num_times
- range: numeric
+ range: OptogeneticSeries__data
required: true
- multivalued: false
+ inlined: true
site:
name: site
annotations:
@@ -40,12 +37,71 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: OptogeneticStimulusSite
- range: string
tree_root: true
+ OptogeneticSeries__data:
+ name: OptogeneticSeries__data
+ description: Applied power for optogenetic stimulus, in watts.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for data, which is fixed to 'watts'.
+ ifabsent: string(watts)
+ range: text
+ required: true
+ equals_string: watts
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
@@ -61,13 +117,11 @@ classes:
description: Description of stimulation site.
range: text
required: true
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
@@ -75,7 +129,6 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
device:
name: device
annotations:
@@ -83,7 +136,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
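`OptogeneticSeries__data` above pins `unit` with `equals_string: watts` and defaults it with `ifabsent: string(watts)`. In a generated Pydantic model that pair plausibly surfaces as a defaulted `Literal`, which both fills the value in and rejects anything else — a sketch under that assumption, not verbatim generator output:

```python
# Assumed rendering of equals_string + ifabsent as a defaulted Literal.
from typing import Literal, Optional

from pydantic import BaseModel, ValidationError


class OptogeneticSeriesData(BaseModel):
    name: Literal["data"] = "data"
    unit: Literal["watts"] = "watts"   # equals_string: watts, ifabsent: string(watts)
    conversion: Optional[float] = 1.0
    resolution: Optional[float] = -1.0


assert OptogeneticSeriesData().unit == "watts"     # default filled in
try:
    OptogeneticSeriesData(unit="volts")            # any other value rejected
except ValidationError as e:
    assert e.errors()[0]["type"] == "literal_error"
```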
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
index 40860fc..aa4dcd3 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
@@ -60,7 +60,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -80,17 +79,9 @@ classes:
data:
name: data
description: Signals from ROIs.
- range: numeric
+ range: RoiResponseSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_rois
+ inlined: true
rois:
name: rois
annotations:
@@ -104,9 +95,73 @@ classes:
on the ROIs stored in this timeseries.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
+ RoiResponseSeries__data:
+ name: RoiResponseSeries__data
+ description: Signals from ROIs.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_rois
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@@ -114,12 +169,19 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(DfOverF)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
Fluorescence:
name: Fluorescence
@@ -128,12 +190,19 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(Fluorescence)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
ImageSegmentation:
name: ImageSegmentation
@@ -146,12 +215,19 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: PlaneSegmentation
+ name:
+ name: name
+ ifabsent: string(ImageSegmentation)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: PlaneSegmentation
tree_root: true
PlaneSegmentation:
name: PlaneSegmentation
@@ -182,6 +258,13 @@ classes:
- alias: num_x
- alias: num_y
- alias: num_z
+ pixel_mask:
+ name: pixel_mask
+ description: 'Pixel masks for each ROI: a list of indices and weights for
+ the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ by the PlaneSegmentation'
+ range: PlaneSegmentation__pixel_mask
+ inlined: true
pixel_mask_index:
name: pixel_mask_index
annotations:
@@ -193,17 +276,13 @@ classes:
value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
- pixel_mask:
- name: pixel_mask
- description: 'Pixel masks for each ROI: a list of indices and weights for
- the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ voxel_mask:
+ name: voxel_mask
+ description: 'Voxel masks for each ROI: a list of indices and weights for
+ the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
- range: PlaneSegmentation__pixel_mask
- required: false
- multivalued: false
+ range: PlaneSegmentation__voxel_mask
inlined: true
voxel_mask_index:
name: voxel_mask_index
@@ -216,17 +295,6 @@ classes:
value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- voxel_mask:
- name: voxel_mask
- description: 'Voxel masks for each ROI: a list of indices and weights for
- the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
- by the PlaneSegmentation'
- range: PlaneSegmentation__voxel_mask
- required: false
- multivalued: false
inlined: true
reference_images:
name: reference_images
@@ -243,7 +311,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -269,24 +336,18 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Pixel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the pixel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
PlaneSegmentation__voxel_mask:
name: PlaneSegmentation__voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
@@ -307,32 +368,24 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Voxel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
z:
name: z
description: Voxel z-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the voxel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@@ -347,27 +400,21 @@ classes:
name: description
description: Description of the imaging plane.
range: text
- required: false
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
range: float32
- required: false
- multivalued: false
indicator:
name: indicator
description: Calcium indicator.
range: text
required: true
- multivalued: false
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
@@ -375,15 +422,12 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
range: ImagingPlane__manifold
- required: false
- multivalued: false
inlined: true
origin_coords:
name: origin_coords
@@ -391,8 +435,6 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
range: ImagingPlane__origin_coords
- required: false
- multivalued: false
inlined: true
grid_spacing:
name: grid_spacing
@@ -400,8 +442,6 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
range: ImagingPlane__grid_spacing
- required: false
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
@@ -423,8 +463,6 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
range: text
- required: false
- multivalued: false
optical_channel:
name: optical_channel
description: An optical channel used to record from an imaging plane.
@@ -440,7 +478,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -573,13 +610,11 @@ classes:
description: Description or other notes about the channel.
range: text
required: true
- multivalued: false
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
range: float32
required: true
- multivalued: false
tree_root: true
MotionCorrection:
name: MotionCorrection
@@ -588,12 +623,19 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: CorrectedImageStack
+ name:
+ name: name
+ ifabsent: string(MotionCorrection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: CorrectedImageStack
tree_root: true
CorrectedImageStack:
name: CorrectedImageStack
@@ -610,7 +652,6 @@ classes:
description: Image stack with frames shifted to the common coordinates.
range: ImageSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
xy_translation:
@@ -619,7 +660,6 @@ classes:
coordinates, for example, to align each frame to a reference image.
range: TimeSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
original:
@@ -629,7 +669,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
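A recurring change in this file: container classes such as `DfOverF`, `Fluorescence`, `ImageSegmentation`, and `MotionCorrection` previously had a single anonymous `- name: value` attribute; they now declare an explicit `name` identifier with an `ifabsent` class-named default next to the multivalued `value`. A sketch of the resulting shape, with `value` rendered as a name-keyed mapping since `inlined_as_list` is false (types are illustrative stand-ins):

```python
# Sketch of the container pattern; RoiResponseSeries is a stand-in
# (the real generated class carries many more fields).
from typing import Dict

from pydantic import BaseModel, Field


class RoiResponseSeries(BaseModel):   # illustrative stand-in
    name: str


class DfOverF(BaseModel):
    name: str = "DfOverF"             # ifabsent: string(DfOverF)
    value: Dict[str, RoiResponseSeries] = Field(default_factory=dict)


dff = DfOverF(value={"roi0": RoiResponseSeries(name="roi0")})
assert dff.name == "DfOverF" and "roi0" in dff.value
```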
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
index 97007ea..2708c7e 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
@@ -37,30 +37,24 @@ classes:
description: Phase response to stimulus on the first measured axis.
range: ImagingRetinotopy__axis_1_phase_map
required: true
- multivalued: false
inlined: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_1_power_map
- required: false
- multivalued: false
inlined: true
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
range: ImagingRetinotopy__axis_2_phase_map
required: true
- multivalued: false
inlined: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_2_power_map
- required: false
- multivalued: false
inlined: true
axis_descriptions:
name: axis_descriptions
@@ -79,16 +73,12 @@ classes:
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
range: ImagingRetinotopy__focal_depth_image
- required: false
- multivalued: false
inlined: true
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: ImagingRetinotopy__sign_map
- required: false
- multivalued: false
inlined: true
vasculature_image:
name: vasculature_image
@@ -96,7 +86,6 @@ classes:
[rows][columns]'
range: ImagingRetinotopy__vasculature_image
required: true
- multivalued: false
inlined: true
tree_root: true
ImagingRetinotopy__axis_1_phase_map:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml
index 1bfb911..077ab82 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml
@@ -47,7 +47,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -56,7 +55,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
@@ -64,7 +62,6 @@ classes:
exact_number_dimensions: 1
range: TimeSeries
required: true
- multivalued: false
inlined: true
tree_root: true
Image:
@@ -166,7 +163,6 @@ classes:
external file.
range: TimeSeries__data
required: true
- multivalued: false
inlined: true
starting_time:
name: starting_time
@@ -174,8 +170,6 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
range: TimeSeries__starting_time
- required: false
- multivalued: false
inlined: true
timestamps:
name: timestamps
@@ -218,8 +212,6 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
range: TimeSeries__sync
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
tree_root: true
@@ -351,13 +343,24 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: NWBDataInterface
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of this collection of processed data.
+ range: text
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: NWBDataInterface
+ - range: DynamicTable
tree_root: true
Images:
name: Images
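`ProcessingModule` above gains an explicit identifier `name` and, notably, a required `description`. Sketched as a Pydantic model — `Any` stands in for the `NWBDataInterface` and `DynamicTable` ranges of the real generated class — constructing one without a description now fails validation:

```python
# Sketch of the newly required fields; `Any` is an illustrative stand-in.
from typing import Any, Dict

from pydantic import BaseModel, Field, ValidationError


class ProcessingModule(BaseModel):
    name: str
    description: str                          # newly required
    value: Dict[str, Any] = Field(default_factory=dict)


try:
    ProcessingModule(name="behavior")         # no description -> error
except ValidationError as e:
    assert e.errors()[0]["loc"] == ("description",)
```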
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
index 47aa752..c16feb9 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
@@ -38,14 +38,11 @@ classes:
reference frame.
range: SpatialSeries__data
required: true
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
range: text
- required: false
- multivalued: false
tree_root: true
SpatialSeries__data:
name: SpatialSeries__data
@@ -59,6 +56,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
@@ -94,12 +127,19 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: IntervalSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEpochs)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: IntervalSeries
tree_root: true
BehavioralEvents:
name: BehavioralEvents
@@ -107,12 +147,19 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEvents)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
BehavioralTimeSeries:
name: BehavioralTimeSeries
@@ -120,36 +167,57 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralTimeSeries)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(PupilTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(EyeTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
CompassDirection:
name: CompassDirection
@@ -160,22 +228,36 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(CompassDirection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(Position)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
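The `conversion` description repeated throughout these schemas carries a worked formula; checking it numerically, the int16 / 5V-range / 8000x-gain example does come out to 9.5367e-9:

```python
# Numerical check of the conversion example in the descriptions above.
conversion = 2.5 / 32768 / 8000
assert f"{conversion:.4e}" == "9.5367e-09"   # matches the documented value

raw_count = 12345                 # raw int16 acquisition value
volts = raw_count * conversion    # data expressed in the declared 'unit'
print(f"{volts:.6e} V")           # ~1.177311e-04 V
```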
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
index 4d8e539..e28b420 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
@@ -39,40 +39,6 @@ classes:
about the filter properties as possible.
range: text
required: false
- data:
- name: data
- description: Recorded voltage data.
- range: numeric
- required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - alias: num_samples
- electrodes:
- name: electrodes
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: DynamicTableRegion pointer to the electrodes that this time series
- was generated from.
- range: DynamicTableRegion
- required: true
- multivalued: false
- inlined: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
@@ -90,7 +56,100 @@ classes:
range: float32
required: false
multivalued: false
+ data:
+ name: data
+ description: Recorded voltage data.
+ range: ElectricalSeries__data
+ required: true
+ inlined: true
+ electrodes:
+ name: electrodes
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: DynamicTableRegion pointer to the electrodes that this time series
+ was generated from.
+ range: DynamicTableRegion
+ required: true
+ inlined: true
tree_root: true
+ ElectricalSeries__data:
+ name: ElectricalSeries__data
+ description: Recorded voltage data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This value
+ is fixed to 'volts'. Actual stored values are not necessarily stored in
+ these units. To access the data in these units, multiply 'data' by 'conversion'
+ and 'channel_conversion' (if present).
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - alias: num_samples
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@@ -111,19 +170,9 @@ classes:
data:
name: data
description: Spike waveforms.
- range: numeric
+ range: SpikeEventSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_events
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_events
- - alias: num_channels
- - alias: num_samples
+ inlined: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
@@ -137,6 +186,73 @@ classes:
required: true
multivalued: false
tree_root: true
+ SpikeEventSeries__data:
+ name: SpikeEventSeries__data
+ description: Spike waveforms.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for waveforms, which is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_samples
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_channels
+ - alias: num_samples
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
@@ -192,7 +308,6 @@ classes:
was generated from.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
EventDetection:
@@ -212,7 +327,6 @@ classes:
or dV/dT threshold, as well as relevant values.
range: text
required: true
- multivalued: false
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
@@ -241,7 +355,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ElectricalSeries
@@ -254,12 +367,19 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpikeEventSeries
+ name:
+ name: name
+ ifabsent: string(EventWaveform)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpikeEventSeries
tree_root: true
FilteredEphys:
name: FilteredEphys
@@ -276,12 +396,19 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(FilteredEphys)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
LFP:
name: LFP
@@ -290,12 +417,19 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(LFP)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
ElectrodeGroup:
name: ElectrodeGroup
@@ -323,8 +457,6 @@ classes:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
- required: false
- multivalued: false
inlined: true
device:
name: device
@@ -333,7 +465,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -356,24 +487,18 @@ classes:
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
y:
name: y
description: y coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
z:
name: z
description: z coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@@ -395,7 +520,6 @@ classes:
description: Filtering applied to data before generating mean/sd
range: text
required: true
- multivalued: false
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
@@ -427,7 +551,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Clustering
@@ -451,7 +574,6 @@ classes:
clusters curated using Klusters, etc)
range: text
required: true
- multivalued: false
num:
name: num
description: Cluster number of each event
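Reviewer note on the pattern in this file: dataset slots that used to be a bare `numeric` range with an inline `any_of` array (e.g. `SpikeEventSeries.data`) now point at a dedicated compound class (`SpikeEventSeries__data`) whose `value` slot carries the array while the dataset attributes (`continuity`, `conversion`, `resolution`, `unit`) sit beside it. A minimal sketch of the pydantic shape this implies; names follow the YAML above, but the actual generated code in nwb_models may differ (notably in array typing):

```python
from typing import Literal, Optional

from pydantic import BaseModel


class SpikeEventSeriesData(BaseModel):
    """Sketch of the compound SpikeEventSeries__data class: the waveform
    array moves into `value` so the dataset attributes can sit beside it."""

    name: Literal["data"] = "data"
    continuity: Optional[str] = None
    conversion: Optional[float] = 1.0
    resolution: Optional[float] = -1.0
    unit: Literal["volts"] = "volts"
    # The real models constrain this to (num_events, num_samples) or
    # (num_events, num_channels, num_samples); a nested list stands in here.
    value: Optional[list] = None


class SpikeEventSeries(BaseModel):
    name: str
    data: SpikeEventSeriesData  # previously a bare `numeric` array slot


series = SpikeEventSeries(
    name="spikes",
    data=SpikeEventSeriesData(value=[[0.0, 1.5], [0.2, 1.1]]),
)
assert series.data.unit == "volts" and series.data.conversion == 1.0
```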
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
index e264a54..eedea6f 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
@@ -63,15 +63,11 @@ classes:
value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
timeseries:
name: timeseries
description: An index into a TimeSeries object.
range: TimeIntervals__timeseries
- required: false
- multivalued: false
inlined: true
timeseries_index:
name: timeseries_index
@@ -84,8 +80,6 @@ classes:
value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
TimeIntervals__timeseries:
@@ -108,8 +102,6 @@ classes:
array:
exact_number_dimensions: 1
range: int32
- required: false
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -117,14 +109,10 @@ classes:
array:
exact_number_dimensions: 1
range: int32
- required: false
- multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
array:
exact_number_dimensions: 1
range: TimeSeries
- required: false
- multivalued: false
inlined: true
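Most of the churn in this file is the deletion of `required: false` and `multivalued: false` lines. Both are LinkML defaults, so dropping them should be a no-op for the generated models: an optional, single-valued slot renders the same either way. A small sketch under that assumption (field names taken from the `TimeIntervals__timeseries` class above):

```python
from typing import Optional

from pydantic import BaseModel


class TimeIntervalsTimeseries(BaseModel):
    """Sketch: optional single-valued slots render as Optional fields
    whether or not `required: false` / `multivalued: false` are spelled out."""

    idx_start: Optional[int] = None
    count: Optional[int] = None


# Both forms validate, because the slots were optional all along.
TimeIntervalsTimeseries()
TimeIntervalsTimeseries(idx_start=0, count=100)
```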
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml
index f81b157..5157c95 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml
@@ -81,13 +81,11 @@ classes:
other files.
range: text
required: true
- multivalued: false
session_description:
name: session_description
description: A description of the experimental session and data in the file.
range: text
required: true
- multivalued: false
session_start_time:
name: session_start_time
description: 'Date and time of the experiment/session start. The date is stored
@@ -96,7 +94,6 @@ classes:
offset. Date accuracy is up to milliseconds.'
range: isodatetime
required: true
- multivalued: false
timestamps_reference_time:
name: timestamps_reference_time
description: 'Date and time corresponding to time zero of all timestamps.
@@ -106,7 +103,6 @@ classes:
times stored in the file use this time as reference (i.e., time zero).'
range: isodatetime
required: true
- multivalued: false
acquisition:
name: acquisition
description: Data streams recorded from the system, including ephys, ophys,
@@ -185,7 +181,6 @@ classes:
can exist in the present file or can be linked to a remote library file.
range: NWBFile__stimulus
required: true
- multivalued: false
inlined: true
inlined_as_list: true
general:
@@ -207,7 +202,6 @@ classes:
should not be created unless there is data to store within them.
range: NWBFile__general
required: true
- multivalued: false
inlined: true
inlined_as_list: true
intervals:
@@ -217,18 +211,18 @@ classes:
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
range: NWBFile__intervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
units:
name: units
description: Data about sorted spike units.
range: Units
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
+ specifications:
+ name: specifications
+ description: Nested dictionary of schema specifications
+ range: dict
tree_root: true
NWBFile__stimulus:
name: NWBFile__stimulus
@@ -299,14 +293,10 @@ classes:
name: data_collection
description: Notes about data collection and analysis.
range: text
- required: false
- multivalued: false
experiment_description:
name: experiment_description
description: General description of the experiment.
range: text
- required: false
- multivalued: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
@@ -321,8 +311,6 @@ classes:
name: institution
description: Institution(s) where experiment was performed.
range: text
- required: false
- multivalued: false
keywords:
name: keywords
description: Terms to search over.
@@ -336,28 +324,20 @@ classes:
name: lab
description: Laboratory where experiment was performed.
range: text
- required: false
- multivalued: false
notes:
name: notes
description: Notes about the experiment.
range: text
- required: false
- multivalued: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
range: text
- required: false
- multivalued: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
range: text
- required: false
- multivalued: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
@@ -371,52 +351,31 @@ classes:
name: session_id
description: Lab-specific ID for the session.
range: text
- required: false
- multivalued: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
range: text
- required: false
- multivalued: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
range: general__source_script
- required: false
- multivalued: false
inlined: true
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
range: text
- required: false
- multivalued: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
range: text
- required: false
- multivalued: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
range: text
- required: false
- multivalued: false
- lab_meta_data:
- name: lab_meta_data
- description: Place-holder than can be extended so that lab-specific meta-data
- can be placed in /general.
- range: LabMetaData
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
@@ -431,24 +390,18 @@ classes:
description: Information about the animal or person from which the data was
measured.
range: Subject
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
range: general__extracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
range: general__intracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
optogenetics:
@@ -467,6 +420,14 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
+ value:
+ name: value
+      description: Place-holder that can be extended so that lab-specific meta-data
+ can be placed in /general.
+ range: LabMetaData
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
general__source_script:
name: general__source_script
description: Script file or link to public source code used to create this NWB
@@ -499,22 +460,19 @@ classes:
range: string
required: true
equals_string: extracellular_ephys
- electrode_group:
- name: electrode_group
- description: Physical group of electrodes.
- range: ElectrodeGroup
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
range: extracellular_ephys__electrodes
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
+ value:
+ name: value
+ description: Physical group of electrodes.
+ range: ElectrodeGroup
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
extracellular_ephys__electrodes:
name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
@@ -657,16 +615,6 @@ classes:
etc. If this changes between TimeSeries, filter description should be stored
as a text attribute for each TimeSeries.'
range: text
- required: false
- multivalued: false
- intracellular_electrode:
- name: intracellular_electrode
- description: An intracellular electrode.
- range: IntracellularElectrode
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
sweep_table:
name: sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries.
@@ -674,8 +622,6 @@ classes:
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
range: SweepTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
intracellular_recordings:
@@ -693,8 +639,6 @@ classes:
to an electrode is also common in intracellular electrophysiology, in which
case other TimeSeries may be used.
range: IntracellularRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
simultaneous_recordings:
@@ -703,8 +647,6 @@ classes:
the IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
range: SimultaneousRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
sequential_recordings:
@@ -714,8 +656,6 @@ classes:
together sequential recordings where a sequence of stimuli of the same
type with varying parameters have been presented in a sequence.
range: SequentialRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
repetitions:
@@ -725,8 +665,6 @@ classes:
type of stimulus, the RepetitionsTable table is typically used to group
sets of stimuli applied in sequence.
range: RepetitionsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
experimental_conditions:
@@ -734,8 +672,13 @@ classes:
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental conditions.
range: ExperimentalConditionsTable
- required: false
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ value:
+ name: value
+ description: An intracellular electrode.
+ range: IntracellularElectrode
+ multivalued: true
inlined: true
inlined_as_list: false
NWBFile__intervals:
@@ -756,32 +699,25 @@ classes:
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
- time_intervals:
- name: time_intervals
+ value:
+ name: value
description: Optional additional table(s) for describing other experimental
time intervals.
range: TimeIntervals
- required: false
multivalued: true
inlined: true
inlined_as_list: false
@@ -810,56 +746,38 @@ classes:
name: age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
range: text
- required: false
- multivalued: false
date_of_birth:
name: date_of_birth
description: Date of birth of subject. Can be supplied instead of 'age'.
range: isodatetime
- required: false
- multivalued: false
description:
name: description
description: Description of subject and where subject came from (e.g., breeder,
if animal).
range: text
- required: false
- multivalued: false
genotype:
name: genotype
description: Genetic strain. If absent, assume Wild Type (WT).
range: text
- required: false
- multivalued: false
sex:
name: sex
description: Gender of subject.
range: text
- required: false
- multivalued: false
species:
name: species
description: Species of subject.
range: text
- required: false
- multivalued: false
strain:
name: strain
description: Strain of subject.
range: text
- required: false
- multivalued: false
subject_id:
name: subject_id
description: ID of animal/person used/participating in experiment (lab convention).
range: text
- required: false
- multivalued: false
weight:
name: weight
description: Weight at time of experiment, at time of surgery and at other
important times.
range: text
- required: false
- multivalued: false
tree_root: true
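A second recurring move in this file: named multivalued slots (`lab_meta_data`, `electrode_group`, `time_intervals`, `intracellular_electrode`) are folded into a generic `value` slot with `multivalued: true` and `inlined_as_list: false`. If that combination renders, as elsewhere in these models, as a name-to-object mapping, the generated class would look roughly like the sketch below (illustrative only, not the actual nwb_models output):

```python
from typing import Dict, Optional

from pydantic import BaseModel


class LabMetaData(BaseModel):
    name: str


class NWBFileGeneral(BaseModel):
    # Named metadata keeps dedicated slots...
    institution: Optional[str] = None
    lab: Optional[str] = None
    # ...while arbitrarily named LabMetaData children collect under `value`
    # (multivalued + inlined_as_list: false, read here as a name -> object map).
    value: Optional[Dict[str, LabMetaData]] = None


general = NWBFileGeneral(
    institution="Example University",
    value={"my_lab_extension": LabMetaData(name="my_lab_extension")},
)
assert "my_lab_extension" in (general.value or {})
```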
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
index d3a808f..848fb69 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
@@ -41,15 +41,12 @@ classes:
description: Recorded voltage or current.
range: PatchClampSeries__data
required: true
- multivalued: false
inlined: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
range: float32
- required: false
- multivalued: false
electrode:
name: electrode
annotations:
@@ -57,7 +54,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: IntracellularElectrode
@@ -74,6 +70,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -99,31 +131,24 @@ classes:
identifier: true
range: string
required: true
+ bias_current:
+ name: bias_current
+ description: Bias current, in amps.
+ range: float32
+ bridge_balance:
+ name: bridge_balance
+ description: Bridge balance, in ohms.
+ range: float32
+ capacitance_compensation:
+ name: capacitance_compensation
+ description: Capacitance compensation, in farads.
+ range: float32
data:
name: data
description: Recorded voltage.
range: CurrentClampSeries__data
required: true
- multivalued: false
inlined: true
- bias_current:
- name: bias_current
- description: Bias current, in amps.
- range: float32
- required: false
- multivalued: false
- bridge_balance:
- name: bridge_balance
- description: Bridge balance, in ohms.
- range: float32
- required: false
- multivalued: false
- capacitance_compensation:
- name: capacitance_compensation
- description: Capacitance compensation, in farads.
- range: float32
- required: false
- multivalued: false
tree_root: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
@@ -136,6 +161,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -147,8 +208,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
@@ -175,19 +238,16 @@ classes:
description: Bias current, in amps, fixed to 0.0.
range: float32
required: true
- multivalued: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
range: float32
required: true
- multivalued: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
range: float32
required: true
- multivalued: false
tree_root: true
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
@@ -204,7 +264,6 @@ classes:
description: Stimulus current applied.
range: CurrentClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
CurrentClampStimulusSeries__data:
@@ -218,6 +277,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -229,8 +324,10 @@ classes:
equals_string: amperes
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@@ -243,87 +340,48 @@ classes:
identifier: true
range: string
required: true
- data:
- name: data
- description: Recorded current.
- range: VoltageClampSeries__data
- required: true
- multivalued: false
- inlined: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
range: VoltageClampSeries__capacitance_fast
- required: false
- multivalued: false
inlined: true
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
range: VoltageClampSeries__capacitance_slow
- required: false
- multivalued: false
+ inlined: true
+ data:
+ name: data
+ description: Recorded current.
+ range: VoltageClampSeries__data
+ required: true
inlined: true
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
range: VoltageClampSeries__resistance_comp_bandwidth
- required: false
- multivalued: false
inlined: true
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
range: VoltageClampSeries__resistance_comp_correction
- required: false
- multivalued: false
inlined: true
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
range: VoltageClampSeries__resistance_comp_prediction
- required: false
- multivalued: false
inlined: true
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
range: VoltageClampSeries__whole_cell_capacitance_comp
- required: false
- multivalued: false
inlined: true
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
range: VoltageClampSeries__whole_cell_series_resistance_comp
- required: false
- multivalued: false
inlined: true
tree_root: true
- VoltageClampSeries__data:
- name: VoltageClampSeries__data
- description: Recorded current.
- attributes:
- name:
- name: name
- ifabsent: string(data)
- identifier: true
- range: string
- required: true
- equals_string: data
- unit:
- name: unit
- description: Base unit of measurement for working with the data. which is
- fixed to 'amperes'. Actual stored values are not necessarily stored in these
- units. To access the data in these units, multiply 'data' by 'conversion'.
- ifabsent: string(amperes)
- range: text
- required: true
- equals_string: amperes
- value:
- name: value
- range: AnyType
- required: true
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
@@ -368,6 +426,68 @@ classes:
name: value
range: float32
required: true
+ VoltageClampSeries__data:
+ name: VoltageClampSeries__data
+ description: Recorded current.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+      description: Base unit of measurement for working with the data, which is
+ fixed to 'amperes'. Actual stored values are not necessarily stored in these
+ units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
+ range: text
+ required: true
+ equals_string: amperes
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
@@ -498,7 +618,6 @@ classes:
description: Stimulus voltage applied.
range: VoltageClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
VoltageClampStimulusSeries__data:
@@ -512,6 +631,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -523,8 +678,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@@ -540,45 +697,32 @@ classes:
description: Description of electrode (e.g., whole-cell, sharp, etc.).
range: text
required: true
- multivalued: false
filtering:
name: filtering
description: Electrode specific filtering.
range: text
- required: false
- multivalued: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
range: text
- required: false
- multivalued: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
range: text
- required: false
- multivalued: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
range: text
- required: false
- multivalued: false
seal:
name: seal
description: Information about seal used for recording.
range: text
- required: false
- multivalued: false
slice:
name: slice
description: Information about slice used for recording.
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
@@ -586,7 +730,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -605,15 +748,6 @@ classes:
identifier: true
range: string
required: true
- sweep_number:
- name: sweep_number
- description: Sweep number of the PatchClampSeries in that row.
- array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: uint32
- required: true
- multivalued: false
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
@@ -636,8 +770,16 @@ classes:
description: Index for series.
range: VectorIndex
required: true
- multivalued: false
inlined: true
+ sweep_number:
+ name: sweep_number
+ description: Sweep number of the PatchClampSeries in that row.
+ array:
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: uint32
+ required: true
+ multivalued: false
tree_root: true
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
@@ -697,7 +839,6 @@ classes:
recording (rows).
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularResponsesTable:
@@ -730,7 +871,6 @@ classes:
recording (rows)
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularRecordingsTable:
@@ -772,15 +912,6 @@ classes:
description: Table for storing intracellular electrode related metadata.
range: IntracellularElectrodesTable
required: true
- multivalued: false
- inlined: true
- inlined_as_list: false
- stimuli:
- name: stimuli
- description: Table for storing intracellular stimulus related metadata.
- range: IntracellularStimuliTable
- required: true
- multivalued: false
inlined: true
inlined_as_list: false
responses:
@@ -788,7 +919,13 @@ classes:
description: Table for storing intracellular response related metadata.
range: IntracellularResponsesTable
required: true
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ stimuli:
+ name: stimuli
+ description: Table for storing intracellular stimulus related metadata.
+ range: IntracellularStimuliTable
+ required: true
inlined: true
inlined_as_list: false
tree_root: true
@@ -812,7 +949,6 @@ classes:
table.
range: SimultaneousRecordingsTable__recordings
required: true
- multivalued: false
inlined: true
recordings_index:
name: recordings_index
@@ -826,7 +962,6 @@ classes:
description: Index dataset for the recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
SimultaneousRecordingsTable__recordings:
@@ -871,7 +1006,6 @@ classes:
table.
range: SequentialRecordingsTable__simultaneous_recordings
required: true
- multivalued: false
inlined: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
@@ -885,7 +1019,6 @@ classes:
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
stimulus_type:
name: stimulus_type
@@ -939,7 +1072,6 @@ classes:
table.
range: RepetitionsTable__sequential_recordings
required: true
- multivalued: false
inlined: true
sequential_recordings_index:
name: sequential_recordings_index
@@ -953,7 +1085,6 @@ classes:
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
RepetitionsTable__sequential_recordings:
@@ -995,7 +1126,6 @@ classes:
description: A reference to one or more rows in the RepetitionsTable table.
range: ExperimentalConditionsTable__repetitions
required: true
- multivalued: false
inlined: true
repetitions_index:
name: repetitions_index
@@ -1009,7 +1139,6 @@ classes:
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
ExperimentalConditionsTable__repetitions:
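The `conversion` docstring repeated throughout this file carries a worked calculation that is easy to verify: a +/-2.5 V range digitized to int16 behind an 8000x amplifier gain gives conversion = 2.5/32768/8000 ≈ 9.5367e-9. A quick numeric check (the sample values are made up):

```python
# Worked check of the conversion example quoted in the descriptions above:
# int16 samples spanning a +/-2.5 V range behind an 8000x amplifier gain.
RANGE_VOLTS = 2.5
INT16_FULL_SCALE = 32768
GAIN = 8000

conversion = RANGE_VOLTS / INT16_FULL_SCALE / GAIN
assert abs(conversion - 9.5367e-9) < 1e-13  # matches the documented value

raw_counts = [-32768, 0, 16384, 32767]  # made-up ADC samples
volts = [count * conversion for count in raw_counts]
print(volts)  # about [-3.125e-04, 0.0, 1.563e-04, 3.125e-04]
```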
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml
index fec75ec..716c087 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml
@@ -91,21 +91,9 @@ classes:
name: data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
- range: numeric
+ range: ImageSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - alias: z
+ inlined: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
@@ -123,8 +111,6 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
range: ImageSeries__external_file
- required: false
- multivalued: false
inlined: true
format:
name: format
@@ -132,22 +118,89 @@ classes:
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
+ ifabsent: string(raw)
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Device
- range: string
tree_root: true
+ ImageSeries__data:
+ name: ImageSeries__data
+ description: Binary data representing images across frames. If data are stored
+ in an external file, this should be an empty 3D array.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - alias: z
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
@@ -206,7 +259,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -226,12 +278,16 @@ classes:
identifier: true
range: string
required: true
+ data:
+ name: data
+ description: Images presented to subject, either grayscale or RGB
+ range: OpticalSeries__data
+ required: true
+ inlined: true
distance:
name: distance
description: Distance from camera/monitor to target/eye.
range: float32
- required: false
- multivalued: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -247,12 +303,69 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
- data:
- name: data
- description: Images presented to subject, either grayscale or RGB
- range: numeric
+ orientation:
+ name: orientation
+ description: Description of image relative to some reference frame (e.g.,
+ which way is up). Must also specify frame of reference.
+ range: text
+ tree_root: true
+ OpticalSeries__data:
+ name: OpticalSeries__data
+ description: Images presented to subject, either grayscale or RGB
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
required: true
- multivalued: false
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -266,14 +379,6 @@ classes:
- alias: y
- alias: r_g_b
exact_cardinality: 3
- orientation:
- name: orientation
- description: Description of image relative to some reference frame (e.g.,
- which way is up). Must also specify frame of reference.
- range: text
- required: false
- multivalued: false
- tree_root: true
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
@@ -292,12 +397,9 @@ classes:
data:
name: data
description: Index of the frame in the referenced ImageSeries.
- array:
- dimensions:
- - alias: num_times
- range: int32
+ range: IndexSeries__data
required: true
- multivalued: false
+ inlined: true
indexed_timeseries:
name: indexed_timeseries
annotations:
@@ -305,9 +407,68 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
- range: string
tree_root: true
+ IndexSeries__data:
+ name: IndexSeries__data
+ description: Index of the frame in the referenced ImageSeries.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: int32
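Two small behavioral notes from this file: `ImageSeries.format` gains `ifabsent: string(raw)`, which turns the documented "default format='raw'" convention into an actual default, and `IndexSeries.data` becomes a compound class whose `value` is the (num_times,) int32 array. A sketch of how those could surface in pydantic (names follow the YAML; only a subset of attributes is shown, and the real generated classes may differ):

```python
from typing import Literal, Optional

from pydantic import BaseModel


class ImageSeries(BaseModel):
    name: str
    # ifabsent: string(raw) -> a real default instead of a prose convention
    format: Optional[str] = "raw"


class IndexSeriesData(BaseModel):
    name: Literal["data"] = "data"
    conversion: Optional[float] = 1.0
    resolution: Optional[float] = -1.0
    unit: str  # required in the schema
    value: Optional[list[int]] = None  # stand-in for the (num_times,) int32 array


assert ImageSeries(name="stim_movie").format == "raw"
```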
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml
index e42c742..e36f824 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
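The two type tweaks here look load-bearing for the generator: `numeric` picks up `repr: float | int`, presumably so integer values survive validation without being coerced to floats, and a new `dict` type (`repr: dict`) backs the `NWBFile.specifications` slot added earlier. Assuming the generator emits the `repr` string verbatim as the Python annotation, the effect would be roughly:

```python
from typing import Optional

from pydantic import BaseModel


class Example(BaseModel):
    # numeric: typeof float, repr `float | int`
    scalar: Optional[float | int] = None
    # dict: repr `dict` (backs the new NWBFile.specifications slot)
    specifications: Optional[dict] = None


e = Example(scalar=3, specifications={"core": {"nwb.file": "..."}})
assert isinstance(e.scalar, int)  # pydantic's smart union keeps the int an int
```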
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
index ec02fc4..5299631 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
@@ -38,7 +38,6 @@ classes:
description: Values of each feature at each time.
range: AbstractFeatureSeries__data
required: true
- multivalued: false
inlined: true
feature_units:
name: feature_units
@@ -70,6 +69,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+      in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Since there can be different units for different features, store
@@ -105,13 +140,70 @@ classes:
data:
name: data
description: Annotations made during an experiment.
+ range: AnnotationSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ AnnotationSeries__data:
+ name: AnnotationSeries__data
+ description: Annotations made during an experiment.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: text
- required: true
- multivalued: false
- tree_root: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
@@ -131,13 +223,70 @@ classes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
+ range: IntervalSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ IntervalSeries__data:
+ name: IntervalSeries__data
+ description: Use values >0 if interval started, <0 if interval ended.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: int8
- required: true
- multivalued: false
- tree_root: true
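The sign convention for IntervalSeries data (>0 opens an interval, <0 closes it, with the magnitude identifying the feature) pairs up mechanically; a toy decoder, where the pairing helper and sample values are illustrative rather than anything from this repo:

```python
# Toy decoder for IntervalSeries.data: >0 marks an interval start,
# <0 marks the matching end; the magnitude identifies the feature.
def pair_intervals(times, codes):
    open_starts = {}  # feature id -> start time
    intervals = []
    for t, c in zip(times, codes):
        if c > 0:
            open_starts[c] = t
        else:
            intervals.append((open_starts.pop(-c), t, -c))
    return intervals

print(pair_intervals([0.0, 1.5, 2.0, 3.1], [1, -1, 2, -2]))
# [(0.0, 1.5, 1), (2.0, 3.1, 2)]
```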
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -153,14 +302,12 @@ classes:
description: Data decomposed into frequency bands.
range: DecompositionSeries__data
required: true
- multivalued: false
inlined: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
range: text
required: true
- multivalued: false
source_channels:
name: source_channels
annotations:
@@ -173,8 +320,6 @@ classes:
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
bands:
name: bands
@@ -182,7 +327,6 @@ classes:
from. There should be one row in this table for each band.
range: DecompositionSeries__bands
required: true
- multivalued: false
inlined: true
inlined_as_list: true
source_timeseries:
@@ -191,8 +335,6 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: TimeSeries
@@ -209,6 +351,42 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -289,63 +467,13 @@ classes:
identifier: true
range: string
required: true
- spike_times_index:
- name: spike_times_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the spike_times dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- spike_times:
- name: spike_times
- description: Spike times for each unit.
- range: Units__spike_times
- required: false
- multivalued: false
- inlined: true
- obs_intervals_index:
- name: obs_intervals_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the obs_intervals dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- obs_intervals:
- name: obs_intervals
- description: Observation intervals for each unit.
+ electrode_group:
+ name: electrode_group
+ description: Electrode group that each spike unit came from.
array:
- dimensions:
- - alias: num_intervals
- - alias: start_end
- exact_cardinality: 2
- range: float64
- required: false
- multivalued: false
- electrodes_index:
- name: electrodes_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into electrodes.
- range: VectorIndex
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: ElectrodeGroup
required: false
multivalued: false
inlined: true
@@ -360,51 +488,69 @@ classes:
value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
- electrode_group:
- name: electrode_group
- description: Electrode group that each spike unit came from.
+ electrodes_index:
+ name: electrodes_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into electrodes.
+ range: VectorIndex
+ inlined: true
+ obs_intervals:
+ name: obs_intervals
+ description: Observation intervals for each unit.
array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: ElectrodeGroup
+ dimensions:
+ - alias: num_intervals
+ - alias: start_end
+ exact_cardinality: 2
+ range: float64
required: false
multivalued: false
+ obs_intervals_index:
+ name: obs_intervals_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the obs_intervals dataset.
+ range: VectorIndex
+ inlined: true
+ spike_times:
+ name: spike_times
+ description: Spike times for each unit.
+ range: Units__spike_times
+ inlined: true
+ spike_times_index:
+ name: spike_times_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the spike_times dataset.
+ range: VectorIndex
inlined: true
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_mean
+ inlined: true
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_sd
+ inlined: true
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
@@ -430,13 +576,8 @@ classes:
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
- array:
- dimensions:
- - alias: num_waveforms
- - alias: num_samples
- range: numeric
- required: false
- multivalued: false
+ range: Units__waveforms
+ inlined: true
waveforms_index:
name: waveforms_index
annotations:
@@ -449,8 +590,6 @@ classes:
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
waveforms_index_index:
name: waveforms_index_index
@@ -464,8 +603,6 @@ classes:
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
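The VectorIndex columns reordered above follow HDMF's ragged-array convention: each index column stores cumulative end offsets into its flat target column. A minimal sketch with made-up arrays:

```python
import numpy as np

# VectorIndex columns such as spike_times_index hold cumulative end
# offsets into the flat target column (HDMF's ragged-array convention).
spike_times = np.array([0.1, 0.4, 0.9, 1.2, 1.5])  # flat, all units
spike_times_index = np.array([2, 5])               # unit 0 -> [:2], unit 1 -> [2:5]

starts = np.concatenate(([0], spike_times_index[:-1]))
per_unit = [spike_times[s:e] for s, e in zip(starts, spike_times_index)]
# per_unit[0] -> [0.1, 0.4]; per_unit[1] -> [0.9, 1.2, 1.5]
```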
Units__spike_times:
@@ -489,3 +626,97 @@ classes:
for the spike time to be between samples.
range: float64
required: false
+ Units__waveform_mean:
+ name: Units__waveform_mean
+ description: Spike waveform mean for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_mean)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_mean
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveform_sd:
+ name: Units__waveform_sd
+ description: Spike waveform standard deviation for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_sd)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_sd
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveforms:
+ name: Units__waveforms
+ description: Individual waveforms for each spike on each electrode. This is a
+ doubly indexed column. The 'waveforms_index' column indexes which waveforms
+ in this column belong to the same spike event for a given unit, where each waveform
+ was recorded from a different electrode. The 'waveforms_index_index' column
+ indexes the 'waveforms_index' column to indicate which spike events belong to
+ a given unit. For example, if the 'waveforms_index_index' column has values
+ [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
+ to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
+ column correspond to the 3 spike events of the second unit, and the next 1 element
+ of the 'waveforms_index' column corresponds to the 1 spike event of the third
+ unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
+ the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
+ that were recorded from 3 different electrodes for the first spike time of the
+ first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
+ for a graphical representation of this example. When there is only one electrode
+ for each unit (i.e., each spike time is associated with a single waveform),
+ then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
+ the number of spike events. The number of electrodes for each spike event should
+ be the same within a given unit. The 'electrodes' column should be used to indicate
+ which electrodes are associated with each unit, and the order of the waveforms
+ within a given unit x spike event should be in the same order as the electrodes
+ referenced in the 'electrodes' column of this table. The number of samples for
+ each waveform must be the same.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveforms)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveforms
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
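The doubly-ragged example spelled out in the Units__waveforms description can be replayed directly; a sketch using the description's own index values (the `bounds` helper is ours, not part of the schema):

```python
import numpy as np

# Worked example from the description: 'waveforms_index_index' gives
# cumulative event counts per unit; 'waveforms_index' gives cumulative
# waveform-row counts per event (HDMF doubly-ragged convention).
waveforms_index_index = np.array([2, 5, 6])
waveforms_index = np.array([3, 6, 8, 10, 12, 13])

def bounds(ends, i):
    return (0 if i == 0 else ends[i - 1], ends[i])

unit = 0
ev_lo, ev_hi = bounds(waveforms_index_index, unit)  # events 0..2 for unit 0
rows = [bounds(waveforms_index, e) for e in range(ev_lo, ev_hi)]
# rows == [(0, 3), (3, 6)]: 3 electrodes' waveforms per spike event,
# i.e. the first 3 'waveforms' rows belong to the first spike of unit 0.
```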
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
index cbe1a6d..67986b2 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
@@ -27,12 +27,9 @@ classes:
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
- array:
- dimensions:
- - alias: num_times
- range: numeric
+ range: OptogeneticSeries__data
required: true
- multivalued: false
+ inlined: true
site:
name: site
annotations:
@@ -40,12 +37,71 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: OptogeneticStimulusSite
- range: string
tree_root: true
+ OptogeneticSeries__data:
+ name: OptogeneticSeries__data
+ description: Applied power for optogenetic stimulus, in watts.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for data, which is fixed to 'watts'.
+ ifabsent: string(watts)
+ range: text
+ required: true
+ equals_string: watts
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
@@ -61,13 +117,11 @@ classes:
description: Description of stimulation site.
range: text
required: true
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
@@ -75,7 +129,6 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
device:
name: device
annotations:
@@ -83,7 +136,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
index aec8547..d63b6ba 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
@@ -60,7 +60,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -80,17 +79,9 @@ classes:
data:
name: data
description: Signals from ROIs.
- range: numeric
+ range: RoiResponseSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_rois
+ inlined: true
rois:
name: rois
annotations:
@@ -104,9 +95,73 @@ classes:
on the ROIs stored in this timeseries.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
+ RoiResponseSeries__data:
+ name: RoiResponseSeries__data
+ description: Signals from ROIs.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_rois
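A value slot with an any_of of array shapes, as above, typically surfaces in the generated Pydantic models as a numpydantic union; a rough sketch, with the class name and exact field layout assumed rather than taken from the generated output:

```python
from typing import Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

# Hypothetical sketch of how the any_of array shapes above can surface
# as a numpydantic union in a generated model; names are assumptions.
class RoiResponseSeriesData(BaseModel):
    value: Union[
        NDArray[Shape["* num_times"], float],
        NDArray[Shape["* num_times, * num_rois"], float],
    ]

RoiResponseSeriesData(value=np.zeros((10, 3)))  # validates the 2-D variant
```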
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@@ -114,12 +169,19 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(DfOverF)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
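The recurring refactor above replaces the anonymous `- name: value` attribute list with explicit named attributes, giving group containers a defaulted `name` identifier next to the multivalued `value` slot. A sketch of the resulting model shape (class and field layout assumed, not generated output):

```python
from pydantic import BaseModel, Field

class RoiResponseSeries(BaseModel):
    name: str

class DfOverF(BaseModel):
    # ifabsent: string(DfOverF) -> default name; identifier: true
    name: str = "DfOverF"
    # multivalued + inlined_as_list: false -> mapping keyed by child name
    value: dict[str, RoiResponseSeries] = Field(default_factory=dict)

d = DfOverF(value={"rrs": RoiResponseSeries(name="rrs")})
assert d.name == "DfOverF"
```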
Fluorescence:
name: Fluorescence
@@ -128,12 +190,19 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(Fluorescence)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
ImageSegmentation:
name: ImageSegmentation
@@ -146,12 +215,19 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: PlaneSegmentation
+ name:
+ name: name
+ ifabsent: string(ImageSegmentation)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: PlaneSegmentation
tree_root: true
PlaneSegmentation:
name: PlaneSegmentation
@@ -182,6 +258,13 @@ classes:
- alias: num_x
- alias: num_y
- alias: num_z
+ pixel_mask:
+ name: pixel_mask
+ description: 'Pixel masks for each ROI: a list of indices and weights for
+ the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ by the PlaneSegmentation'
+ range: PlaneSegmentation__pixel_mask
+ inlined: true
pixel_mask_index:
name: pixel_mask_index
annotations:
@@ -193,17 +276,13 @@ classes:
value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
- pixel_mask:
- name: pixel_mask
- description: 'Pixel masks for each ROI: a list of indices and weights for
- the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ voxel_mask:
+ name: voxel_mask
+ description: 'Voxel masks for each ROI: a list of indices and weights for
+ the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
- range: PlaneSegmentation__pixel_mask
- required: false
- multivalued: false
+ range: PlaneSegmentation__voxel_mask
inlined: true
voxel_mask_index:
name: voxel_mask_index
@@ -216,17 +295,6 @@ classes:
value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- voxel_mask:
- name: voxel_mask
- description: 'Voxel masks for each ROI: a list of indices and weights for
- the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
- by the PlaneSegmentation'
- range: PlaneSegmentation__voxel_mask
- required: false
- multivalued: false
inlined: true
reference_images:
name: reference_images
@@ -243,7 +311,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -269,24 +336,18 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Pixel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the pixel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
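A concatenated pixel_mask plus its companion index column might look like the following; the values and the structured-dtype layout are illustrative, while the x/y uint32 and weight float32 types come from the class above:

```python
import numpy as np

# Sketch of a concatenated pixel_mask (x, y, weight per row) for two
# ROIs, with pixel_mask_index holding cumulative end offsets.
pixel_mask = np.array(
    [(0, 0, 1.0), (0, 1, 0.5),   # ROI 0: two pixels
     (5, 5, 1.0)],               # ROI 1: one pixel
    dtype=[("x", "u4"), ("y", "u4"), ("weight", "f4")],
)
pixel_mask_index = np.array([2, 3])  # ROI 0 -> [:2], ROI 1 -> [2:3]
```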
PlaneSegmentation__voxel_mask:
name: PlaneSegmentation__voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
@@ -307,32 +368,24 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Voxel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
z:
name: z
description: Voxel z-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the voxel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@@ -347,27 +400,21 @@ classes:
name: description
description: Description of the imaging plane.
range: text
- required: false
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
range: float32
- required: false
- multivalued: false
indicator:
name: indicator
description: Calcium indicator.
range: text
required: true
- multivalued: false
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
@@ -375,15 +422,12 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
range: ImagingPlane__manifold
- required: false
- multivalued: false
inlined: true
origin_coords:
name: origin_coords
@@ -391,8 +435,6 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
range: ImagingPlane__origin_coords
- required: false
- multivalued: false
inlined: true
grid_spacing:
name: grid_spacing
@@ -400,8 +442,6 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
range: ImagingPlane__grid_spacing
- required: false
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
@@ -423,8 +463,6 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
range: text
- required: false
- multivalued: false
optical_channel:
name: optical_channel
description: An optical channel used to record from an imaging plane.
@@ -440,7 +478,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -573,13 +610,11 @@ classes:
description: Description or other notes about the channel.
range: text
required: true
- multivalued: false
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
range: float32
required: true
- multivalued: false
tree_root: true
MotionCorrection:
name: MotionCorrection
@@ -588,12 +623,19 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: CorrectedImageStack
+ name:
+ name: name
+ ifabsent: string(MotionCorrection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: CorrectedImageStack
tree_root: true
CorrectedImageStack:
name: CorrectedImageStack
@@ -610,7 +652,6 @@ classes:
description: Image stack with frames shifted to the common coordinates.
range: ImageSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
xy_translation:
@@ -619,7 +660,6 @@ classes:
coordinates, for example, to align each frame to a reference image.
range: TimeSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
original:
@@ -629,7 +669,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
index f30f06f..1b75917 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
@@ -37,30 +37,24 @@ classes:
description: Phase response to stimulus on the first measured axis.
range: ImagingRetinotopy__axis_1_phase_map
required: true
- multivalued: false
inlined: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_1_power_map
- required: false
- multivalued: false
inlined: true
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
range: ImagingRetinotopy__axis_2_phase_map
required: true
- multivalued: false
inlined: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_2_power_map
- required: false
- multivalued: false
inlined: true
axis_descriptions:
name: axis_descriptions
@@ -79,16 +73,12 @@ classes:
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
range: ImagingRetinotopy__focal_depth_image
- required: false
- multivalued: false
inlined: true
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: ImagingRetinotopy__sign_map
- required: false
- multivalued: false
inlined: true
vasculature_image:
name: vasculature_image
@@ -96,7 +86,6 @@ classes:
[rows][columns]'
range: ImagingRetinotopy__vasculature_image
required: true
- multivalued: false
inlined: true
tree_root: true
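The sign_map description refers to the standard visual-field-sign computation: the sine of the angle between the gradients of the two phase maps. A sketch with placeholder maps (not code from this repo):

```python
import numpy as np

# Field-sign sketch: sin of the angle between the two phase-map
# gradients. Maps are random placeholders standing in for real data.
axis_1_phase = np.random.rand(64, 64)
axis_2_phase = np.random.rand(64, 64)

g1y, g1x = np.gradient(axis_1_phase)  # np.gradient returns (d/dy, d/dx)
g2y, g2x = np.gradient(axis_2_phase)
angle = np.arctan2(g1y, g1x) - np.arctan2(g2y, g2x)
sign_map = np.sin(angle)  # +/- regions mark mirror vs. non-mirror areas
```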
ImagingRetinotopy__axis_1_phase_map:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml
index 547dd4c..a92af97 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml
@@ -47,7 +47,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -56,7 +55,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
@@ -64,7 +62,6 @@ classes:
exact_number_dimensions: 1
range: TimeSeries
required: true
- multivalued: false
inlined: true
tree_root: true
Image:
@@ -189,7 +186,6 @@ classes:
external file.
range: TimeSeries__data
required: true
- multivalued: false
inlined: true
starting_time:
name: starting_time
@@ -197,8 +193,6 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
range: TimeSeries__starting_time
- required: false
- multivalued: false
inlined: true
timestamps:
name: timestamps
@@ -241,8 +235,6 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
range: TimeSeries__sync
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
tree_root: true
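When samples are uniformly spaced, only starting_time and its rate attribute need storing, and per-sample timestamps are reconstructed on demand, as the starting_time description says. A minimal sketch (the function name is ours):

```python
import numpy as np

# Reconstruct uniform timestamps from starting_time plus a rate,
# instead of storing an explicit timestamps array.
def reconstruct_timestamps(starting_time: float, rate: float, n: int):
    return starting_time + np.arange(n) / rate

reconstruct_timestamps(10.0, 1000.0, 5)
# array([10.   , 10.001, 10.002, 10.003, 10.004])
```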
@@ -383,13 +375,24 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: NWBDataInterface
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of this collection of processed data.
+ range: text
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: NWBDataInterface
+ - range: DynamicTable
tree_root: true
Images:
name: Images
@@ -429,7 +432,5 @@ classes:
and only once, so the dataset should have the same length as the number
of images.
range: ImageReferences
- required: false
- multivalued: false
inlined: true
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
index 94ff5f8..cdb2a98 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
@@ -38,14 +38,11 @@ classes:
reference frame.
range: SpatialSeries__data
required: true
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
range: text
- required: false
- multivalued: false
tree_root: true
SpatialSeries__data:
name: SpatialSeries__data
@@ -59,6 +56,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
@@ -106,12 +148,19 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: IntervalSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEpochs)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: IntervalSeries
tree_root: true
BehavioralEvents:
name: BehavioralEvents
@@ -119,12 +168,19 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEvents)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
BehavioralTimeSeries:
name: BehavioralTimeSeries
@@ -132,36 +188,57 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralTimeSeries)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(PupilTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(EyeTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
CompassDirection:
name: CompassDirection
@@ -172,22 +249,36 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(CompassDirection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(Position)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
index b611d74..8439a6a 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
@@ -39,40 +39,6 @@ classes:
about the filter properties as possible.
range: text
required: false
- data:
- name: data
- description: Recorded voltage data.
- range: numeric
- required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - alias: num_samples
- electrodes:
- name: electrodes
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: DynamicTableRegion pointer to the electrodes that this time series
- was generated from.
- range: DynamicTableRegion
- required: true
- multivalued: false
- inlined: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
@@ -90,7 +56,109 @@ classes:
range: float32
required: false
multivalued: false
+ data:
+ name: data
+ description: Recorded voltage data.
+ range: ElectricalSeries__data
+ required: true
+ inlined: true
+ electrodes:
+ name: electrodes
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: DynamicTableRegion pointer to the electrodes that this time series
+ was generated from.
+ range: DynamicTableRegion
+ required: true
+ inlined: true
tree_root: true
+ ElectricalSeries__data:
+ name: ElectricalSeries__data
+ description: Recorded voltage data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This value
+ is fixed to 'volts'. Actual stored values are not necessarily stored in
+ these units. To access the data in these units, multiply 'data' by 'conversion',
+ followed by 'channel_conversion' (if present), and then add 'offset'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - alias: num_samples
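The unit description above fixes the full decoding pipeline for ElectricalSeries data: multiply by 'conversion', then by 'channel_conversion' if present, then add 'offset'. A sketch with made-up numbers; channel_conversion broadcasts across the channel axis:

```python
import numpy as np

# volts = data * conversion * channel_conversion + offset,
# per the 'unit' description for ElectricalSeries data.
data = np.array([[100, -200], [300, 50]], dtype=np.int16)  # (time, channel)
conversion, offset = 1.9e-7, 0.0
channel_conversion = np.array([1.0, 1.05])                 # per channel

volts = data * conversion * channel_conversion + offset
```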
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@@ -111,19 +179,9 @@ classes:
data:
name: data
description: Spike waveforms.
- range: numeric
+ range: SpikeEventSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_events
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_events
- - alias: num_channels
- - alias: num_samples
+ inlined: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
@@ -137,6 +195,82 @@ classes:
required: true
multivalued: false
tree_root: true
+ SpikeEventSeries__data:
+ name: SpikeEventSeries__data
+ description: Spike waveforms.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for waveforms, which is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_samples
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_channels
+ - alias: num_samples
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
@@ -192,7 +326,6 @@ classes:
was generated from.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
EventDetection:
@@ -212,7 +345,6 @@ classes:
or dV/dT threshold, as well as relevant values.
range: text
required: true
- multivalued: false
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
@@ -241,7 +373,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ElectricalSeries
@@ -254,12 +385,19 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpikeEventSeries
+ name:
+ name: name
+ ifabsent: string(EventWaveform)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpikeEventSeries
tree_root: true
FilteredEphys:
name: FilteredEphys
@@ -276,12 +414,19 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(FilteredEphys)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
LFP:
name: LFP
@@ -290,12 +435,19 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(LFP)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
ElectrodeGroup:
name: ElectrodeGroup
@@ -323,8 +475,6 @@ classes:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
- required: false
- multivalued: false
inlined: true
device:
name: device
@@ -333,7 +483,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -356,24 +505,18 @@ classes:
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
y:
name: y
description: y coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
z:
name: z
description: z coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@@ -395,7 +538,6 @@ classes:
description: Filtering applied to data before generating mean/sd
range: text
required: true
- multivalued: false
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
@@ -427,7 +569,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Clustering
@@ -451,7 +592,6 @@ classes:
clusters curated using Klusters, etc)
range: text
required: true
- multivalued: false
num:
name: num
description: Cluster number of each event
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
index 9857394..81a3ca5 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
@@ -63,8 +63,6 @@ classes:
value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
timeseries:
name: timeseries
@@ -77,8 +75,6 @@ classes:
value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
- required: false
- multivalued: false
inlined: true
timeseries_index:
name: timeseries_index
@@ -91,7 +87,5 @@ classes:
value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml
index 01ef5b5..82f7932 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml
@@ -81,13 +81,11 @@ classes:
other files.
range: text
required: true
- multivalued: false
session_description:
name: session_description
description: A description of the experimental session and data in the file.
range: text
required: true
- multivalued: false
session_start_time:
name: session_start_time
description: 'Date and time of the experiment/session start. The date is stored
@@ -96,7 +94,6 @@ classes:
offset. Date accuracy is up to milliseconds.'
range: isodatetime
required: true
- multivalued: false
timestamps_reference_time:
name: timestamps_reference_time
description: 'Date and time corresponding to time zero of all timestamps.
@@ -106,7 +103,6 @@ classes:
times stored in the file use this time as reference (i.e., time zero).'
range: isodatetime
required: true
- multivalued: false
acquisition:
name: acquisition
description: Data streams recorded from the system, including ephys, ophys,
@@ -185,7 +181,6 @@ classes:
can exist in the present file or can be linked to a remote library file.
range: NWBFile__stimulus
required: true
- multivalued: false
inlined: true
inlined_as_list: true
general:
@@ -207,7 +202,6 @@ classes:
should not be created unless there is data to store within them.
range: NWBFile__general
required: true
- multivalued: false
inlined: true
inlined_as_list: true
intervals:
@@ -217,18 +211,18 @@ classes:
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
range: NWBFile__intervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
units:
name: units
description: Data about sorted spike units.
range: Units
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
+ specifications:
+ name: specifications
+ description: Nested dictionary of schema specifications
+ range: dict
tree_root: true
NWBFile__stimulus:
name: NWBFile__stimulus
@@ -300,14 +294,10 @@ classes:
name: data_collection
description: Notes about data collection and analysis.
range: text
- required: false
- multivalued: false
experiment_description:
name: experiment_description
description: General description of the experiment.
range: text
- required: false
- multivalued: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
@@ -322,8 +312,6 @@ classes:
name: institution
description: Institution(s) where experiment was performed.
range: text
- required: false
- multivalued: false
keywords:
name: keywords
description: Terms to search over.
@@ -337,28 +325,20 @@ classes:
name: lab
description: Laboratory where experiment was performed.
range: text
- required: false
- multivalued: false
notes:
name: notes
description: Notes about the experiment.
range: text
- required: false
- multivalued: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
range: text
- required: false
- multivalued: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
range: text
- required: false
- multivalued: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
@@ -372,52 +352,31 @@ classes:
name: session_id
description: Lab-specific ID for the session.
range: text
- required: false
- multivalued: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
range: text
- required: false
- multivalued: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
range: general__source_script
- required: false
- multivalued: false
inlined: true
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
range: text
- required: false
- multivalued: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
range: text
- required: false
- multivalued: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
range: text
- required: false
- multivalued: false
- lab_meta_data:
- name: lab_meta_data
- description: Place-holder than can be extended so that lab-specific meta-data
- can be placed in /general.
- range: LabMetaData
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
@@ -432,24 +391,18 @@ classes:
description: Information about the animal or person from which the data was
measured.
range: Subject
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
range: general__extracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
range: general__intracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
optogenetics:
@@ -468,6 +421,14 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
+ value:
+ name: value
+        description: Place-holder that can be extended so that lab-specific meta-data
+ can be placed in /general.
+ range: LabMetaData
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
general__source_script:
name: general__source_script
description: Script file or link to public source code used to create this NWB
@@ -500,22 +461,19 @@ classes:
range: string
required: true
equals_string: extracellular_ephys
- electrode_group:
- name: electrode_group
- description: Physical group of electrodes.
- range: ElectrodeGroup
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
range: extracellular_ephys__electrodes
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
+ value:
+ name: value
+ description: Physical group of electrodes.
+ range: ElectrodeGroup
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
extracellular_ephys__electrodes:
name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
@@ -660,16 +618,6 @@ classes:
etc. If this changes between TimeSeries, filter description should be stored
as a text attribute for each TimeSeries.'
range: text
- required: false
- multivalued: false
- intracellular_electrode:
- name: intracellular_electrode
- description: An intracellular electrode.
- range: IntracellularElectrode
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
sweep_table:
name: sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries.
@@ -677,8 +625,6 @@ classes:
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
range: SweepTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
intracellular_recordings:
@@ -696,8 +642,6 @@ classes:
to an electrode is also common in intracellular electrophysiology, in which
case other TimeSeries may be used.
range: IntracellularRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
simultaneous_recordings:
@@ -706,8 +650,6 @@ classes:
the IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
range: SimultaneousRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
sequential_recordings:
@@ -717,8 +659,6 @@ classes:
together sequential recordings where a sequence of stimuli of the same
type with varying parameters has been presented in a sequence.
range: SequentialRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
repetitions:
@@ -728,8 +668,6 @@ classes:
type of stimulus, the RepetitionsTable table is typically used to group
sets of stimuli applied in sequence.
range: RepetitionsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
experimental_conditions:
@@ -737,8 +675,13 @@ classes:
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental conditions.
range: ExperimentalConditionsTable
- required: false
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ value:
+ name: value
+ description: An intracellular electrode.
+ range: IntracellularElectrode
+ multivalued: true
inlined: true
inlined_as_list: false
NWBFile__intervals:
@@ -759,32 +702,25 @@ classes:
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
- time_intervals:
- name: time_intervals
+ value:
+ name: value
description: Optional additional table(s) for describing other experimental
time intervals.
range: TimeIntervals
- required: false
multivalued: true
inlined: true
inlined_as_list: false
@@ -813,56 +749,38 @@ classes:
name: age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
range: text
- required: false
- multivalued: false
date_of_birth:
name: date_of_birth
description: Date of birth of subject. Can be supplied instead of 'age'.
range: isodatetime
- required: false
- multivalued: false
description:
name: description
description: Description of subject and where subject came from (e.g., breeder,
if animal).
range: text
- required: false
- multivalued: false
genotype:
name: genotype
description: Genetic strain. If absent, assume Wild Type (WT).
range: text
- required: false
- multivalued: false
sex:
name: sex
description: Gender of subject.
range: text
- required: false
- multivalued: false
species:
name: species
description: Species of subject.
range: text
- required: false
- multivalued: false
strain:
name: strain
description: Strain of subject.
range: text
- required: false
- multivalued: false
subject_id:
name: subject_id
description: ID of animal/person used/participating in experiment (lab convention).
range: text
- required: false
- multivalued: false
weight:
name: weight
description: Weight at time of experiment, at time of surgery and at other
important times.
range: text
- required: false
- multivalued: false
tree_root: true
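The recurring edit in this file replaces dedicated placeholder slots (lab_meta_data, electrode_group, time_intervals, intracellular_electrode) with a generic `value` slot marked `multivalued: true`, `inlined: true`, `inlined_as_list: false`, and drops `required: false` / `multivalued: false` lines that merely restate LinkML defaults. A minimal sketch of the pydantic shape such a `value` slot plausibly generates (class and field names here are illustrative assumptions, not the actual generated code):

```python
from typing import Dict, Optional

from pydantic import BaseModel


class LabMetaData(BaseModel):
    """Stand-in for the generated LabMetaData model (assumed)."""

    name: str


class GeneralSketch(BaseModel):
    """Hypothetical container whose lab-specific extras land in `value`.

    In LinkML, `multivalued: true` + `inlined: true` + `inlined_as_list: false`
    renders the children as a mapping keyed by their identifier slot.
    """

    institution: Optional[str] = None
    value: Optional[Dict[str, LabMetaData]] = None


meta = GeneralSketch(
    institution="Example University",
    value={"my_extension": LabMetaData(name="my_extension")},
)
print(meta.value["my_extension"].name)  # my_extension
```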
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
index 257b07b..cc89c87 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
@@ -41,15 +41,12 @@ classes:
description: Recorded voltage or current.
range: PatchClampSeries__data
required: true
- multivalued: false
inlined: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
range: float32
- required: false
- multivalued: false
electrode:
name: electrode
annotations:
@@ -57,7 +54,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: IntracellularElectrode
@@ -74,6 +70,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -99,31 +140,24 @@ classes:
identifier: true
range: string
required: true
+ bias_current:
+ name: bias_current
+ description: Bias current, in amps.
+ range: float32
+ bridge_balance:
+ name: bridge_balance
+ description: Bridge balance, in ohms.
+ range: float32
+ capacitance_compensation:
+ name: capacitance_compensation
+ description: Capacitance compensation, in farads.
+ range: float32
data:
name: data
description: Recorded voltage.
range: CurrentClampSeries__data
required: true
- multivalued: false
inlined: true
- bias_current:
- name: bias_current
- description: Bias current, in amps.
- range: float32
- required: false
- multivalued: false
- bridge_balance:
- name: bridge_balance
- description: Bridge balance, in ohms.
- range: float32
- required: false
- multivalued: false
- capacitance_compensation:
- name: capacitance_compensation
- description: Capacitance compensation, in farads.
- range: float32
- required: false
- multivalued: false
tree_root: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
@@ -136,6 +170,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -148,8 +227,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
@@ -176,19 +257,16 @@ classes:
description: Bias current, in amps, fixed to 0.0.
range: float32
required: true
- multivalued: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
range: float32
required: true
- multivalued: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
range: float32
required: true
- multivalued: false
tree_root: true
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
@@ -205,7 +283,6 @@ classes:
description: Stimulus current applied.
range: CurrentClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
CurrentClampStimulusSeries__data:
@@ -219,6 +296,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -231,8 +353,10 @@ classes:
equals_string: amperes
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@@ -245,88 +369,48 @@ classes:
identifier: true
range: string
required: true
- data:
- name: data
- description: Recorded current.
- range: VoltageClampSeries__data
- required: true
- multivalued: false
- inlined: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
range: VoltageClampSeries__capacitance_fast
- required: false
- multivalued: false
inlined: true
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
range: VoltageClampSeries__capacitance_slow
- required: false
- multivalued: false
+ inlined: true
+ data:
+ name: data
+ description: Recorded current.
+ range: VoltageClampSeries__data
+ required: true
inlined: true
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
range: VoltageClampSeries__resistance_comp_bandwidth
- required: false
- multivalued: false
inlined: true
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
range: VoltageClampSeries__resistance_comp_correction
- required: false
- multivalued: false
inlined: true
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
range: VoltageClampSeries__resistance_comp_prediction
- required: false
- multivalued: false
inlined: true
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
range: VoltageClampSeries__whole_cell_capacitance_comp
- required: false
- multivalued: false
inlined: true
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
range: VoltageClampSeries__whole_cell_series_resistance_comp
- required: false
- multivalued: false
inlined: true
tree_root: true
- VoltageClampSeries__data:
- name: VoltageClampSeries__data
- description: Recorded current.
- attributes:
- name:
- name: name
- ifabsent: string(data)
- identifier: true
- range: string
- required: true
- equals_string: data
- unit:
- name: unit
- description: Base unit of measurement for working with the data. which is
- fixed to 'amperes'. Actual stored values are not necessarily stored in these
- units. To access the data in these units, multiply 'data' by 'conversion'
- and add 'offset'.
- ifabsent: string(amperes)
- range: text
- required: true
- equals_string: amperes
- value:
- name: value
- range: AnyType
- required: true
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
@@ -371,6 +455,78 @@ classes:
name: value
range: float32
required: true
+ VoltageClampSeries__data:
+ name: VoltageClampSeries__data
+ description: Recorded current.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+        description: Base unit of measurement for working with the data, which is
+ fixed to 'amperes'. Actual stored values are not necessarily stored in these
+ units. To access the data in these units, multiply 'data' by 'conversion'
+ and add 'offset'.
+ ifabsent: string(amperes)
+ range: text
+ required: true
+ equals_string: amperes
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
@@ -501,7 +657,6 @@ classes:
description: Stimulus voltage applied.
range: VoltageClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
VoltageClampStimulusSeries__data:
@@ -515,6 +670,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -527,8 +727,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@@ -543,52 +745,37 @@ classes:
name: cell_id
description: unique ID of the cell
range: text
- required: false
- multivalued: false
description:
name: description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
range: text
required: true
- multivalued: false
filtering:
name: filtering
description: Electrode specific filtering.
range: text
- required: false
- multivalued: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
range: text
- required: false
- multivalued: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
range: text
- required: false
- multivalued: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
range: text
- required: false
- multivalued: false
seal:
name: seal
description: Information about seal used for recording.
range: text
- required: false
- multivalued: false
slice:
name: slice
description: Information about slice used for recording.
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
@@ -596,7 +783,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -615,15 +801,6 @@ classes:
identifier: true
range: string
required: true
- sweep_number:
- name: sweep_number
- description: Sweep number of the PatchClampSeries in that row.
- array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: uint32
- required: true
- multivalued: false
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
@@ -646,8 +823,16 @@ classes:
description: Index for series.
range: VectorIndex
required: true
- multivalued: false
inlined: true
+ sweep_number:
+ name: sweep_number
+ description: Sweep number of the PatchClampSeries in that row.
+ array:
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: uint32
+ required: true
+ multivalued: false
tree_root: true
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
@@ -707,7 +892,6 @@ classes:
recording (rows).
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularResponsesTable:
@@ -740,7 +924,6 @@ classes:
recording (rows)
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularRecordingsTable:
@@ -782,15 +965,6 @@ classes:
description: Table for storing intracellular electrode related metadata.
range: IntracellularElectrodesTable
required: true
- multivalued: false
- inlined: true
- inlined_as_list: false
- stimuli:
- name: stimuli
- description: Table for storing intracellular stimulus related metadata.
- range: IntracellularStimuliTable
- required: true
- multivalued: false
inlined: true
inlined_as_list: false
responses:
@@ -798,7 +972,13 @@ classes:
description: Table for storing intracellular response related metadata.
range: IntracellularResponsesTable
required: true
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ stimuli:
+ name: stimuli
+ description: Table for storing intracellular stimulus related metadata.
+ range: IntracellularStimuliTable
+ required: true
inlined: true
inlined_as_list: false
tree_root: true
@@ -822,7 +1002,6 @@ classes:
table.
range: SimultaneousRecordingsTable__recordings
required: true
- multivalued: false
inlined: true
recordings_index:
name: recordings_index
@@ -836,7 +1015,6 @@ classes:
description: Index dataset for the recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
SimultaneousRecordingsTable__recordings:
@@ -881,7 +1059,6 @@ classes:
table.
range: SequentialRecordingsTable__simultaneous_recordings
required: true
- multivalued: false
inlined: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
@@ -895,7 +1072,6 @@ classes:
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
stimulus_type:
name: stimulus_type
@@ -949,7 +1125,6 @@ classes:
table.
range: RepetitionsTable__sequential_recordings
required: true
- multivalued: false
inlined: true
sequential_recordings_index:
name: sequential_recordings_index
@@ -963,7 +1138,6 @@ classes:
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
RepetitionsTable__sequential_recordings:
@@ -1005,7 +1179,6 @@ classes:
description: A reference to one or more rows in the RepetitionsTable table.
range: ExperimentalConditionsTable__repetitions
required: true
- multivalued: false
inlined: true
repetitions_index:
name: repetitions_index
@@ -1019,7 +1192,6 @@ classes:
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
ExperimentalConditionsTable__repetitions:
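The `conversion`, `offset`, and `resolution` attributes added to each of the `*__data` classes above all describe one affine mapping: values in the declared `unit` are `data * conversion + offset`, while `resolution` is descriptive metadata (-1.0 when unknown) and takes no part in the transform. A quick sketch of the worked example from the descriptions, assuming int16 acquisition over a ±2.5 V range at 8000x gain:

```python
import numpy as np

# Worked example from the schema text: signed 16-bit storage spanning
# -2.5..2.5 V, with 8000x amplifier gain.
conversion = 2.5 / 32768 / 8000  # 9.5367e-9 recorded volts per raw count
offset = 0.0                     # signed storage needs no re-centering

raw = np.array([-32768, 0, 32767], dtype=np.int16)
volts = raw * conversion + offset
print(volts)  # about [-3.125e-04  0.000e+00  3.125e-04]
```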
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml
index dd4d2f4..1d9c427 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml
@@ -91,21 +91,9 @@ classes:
name: data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
- range: numeric
+ range: ImageSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - alias: z
+ inlined: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
@@ -123,8 +111,6 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
range: ImageSeries__external_file
- required: false
- multivalued: false
inlined: true
format:
name: format
@@ -132,22 +118,98 @@ classes:
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
+ ifabsent: string(raw)
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Device
- range: string
tree_root: true
+ ImageSeries__data:
+ name: ImageSeries__data
+ description: Binary data representing images across frames. If data are stored
+ in an external file, this should be an empty 3D array.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - alias: z
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
@@ -206,7 +268,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -226,12 +287,16 @@ classes:
identifier: true
range: string
required: true
+ data:
+ name: data
+ description: Images presented to subject, either grayscale or RGB
+ range: OpticalSeries__data
+ required: true
+ inlined: true
distance:
name: distance
description: Distance from camera/monitor to target/eye.
range: float32
- required: false
- multivalued: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -247,12 +312,78 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
- data:
- name: data
- description: Images presented to subject, either grayscale or RGB
- range: numeric
+ orientation:
+ name: orientation
+ description: Description of image relative to some reference frame (e.g.,
+ which way is up). Must also specify frame of reference.
+ range: text
+ tree_root: true
+ OpticalSeries__data:
+ name: OpticalSeries__data
+ description: Images presented to subject, either grayscale or RGB
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
required: true
- multivalued: false
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -266,14 +397,6 @@ classes:
- alias: y
- alias: r_g_b
exact_cardinality: 3
- orientation:
- name: orientation
- description: Description of image relative to some reference frame (e.g.,
- which way is up). Must also specify frame of reference.
- range: text
- required: false
- multivalued: false
- tree_root: true
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
@@ -294,20 +417,15 @@ classes:
name: data
description: Index of the image (using zero-indexing) in the linked Images
object.
- array:
- dimensions:
- - alias: num_times
- range: uint32
+ range: IndexSeries__data
required: true
- multivalued: false
+ inlined: true
indexed_timeseries:
name: indexed_timeseries
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -318,10 +436,62 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Images
- range: string
tree_root: true
+ IndexSeries__data:
+ name: IndexSeries__data
+ description: Index of the image (using zero-indexing) in the linked Images object.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: This field is unused by IndexSeries.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: This field is unused by IndexSeries.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: This field is unused by IndexSeries.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: This field is unused by IndexSeries and has the value N/A.
+ ifabsent: string(N/A)
+ range: text
+ required: true
+ equals_string: N/A
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: uint32
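The new `ImageSeries__data` class above moves the array constraint onto its `value` slot: either a 3-D `(frame, x, y)` stack or a 4-D `(frame, x, y, z)` volume is accepted. A minimal shape check mirroring that `any_of` (plain numpy here; the generated models presumably enforce the same constraint through their array annotations):

```python
import numpy as np


def check_image_series_shape(data: np.ndarray) -> None:
    """Accept (frame, x, y) or (frame, x, y, z), matching the any_of above."""
    if data.ndim not in (3, 4):
        raise ValueError(f"expected a 3-D or 4-D image stack, got {data.ndim}-D")


check_image_series_shape(np.zeros((10, 64, 64)))     # frame, x, y
check_image_series_shape(np.zeros((10, 64, 64, 2)))  # frame, x, y, z
```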
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml
index e42c742..e36f824 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
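The two `repr` additions change only how these types render in generated Python: `numeric` is emitted as the union `float | int` rather than bare `float`, and the new `dict` type maps to the builtin. A sketch of the effect on an emitted field (illustrative, not actual generator output):

```python
from pydantic import BaseModel


class SketchSeries(BaseModel):
    # numeric with repr "float | int" becomes a union annotation, so integer
    # data can validate as int rather than being coerced to float (pydantic
    # v2 smart unions prefer the exact input type).
    resolution: float | int = -1.0


print(type(SketchSeries(resolution=-1).resolution))  # <class 'int'>
```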
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
index 5bfeb44..8cb7a86 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
@@ -38,7 +38,6 @@ classes:
description: Values of each feature at each time.
range: AbstractFeatureSeries__data
required: true
- multivalued: false
inlined: true
feature_units:
name: feature_units
@@ -70,6 +69,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Since there can be different units for different features, store
@@ -105,13 +149,79 @@ classes:
data:
name: data
description: Annotations made during an experiment.
+ range: AnnotationSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ AnnotationSeries__data:
+ name: AnnotationSeries__data
+ description: Annotations made during an experiment.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: text
- required: true
- multivalued: false
- tree_root: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
@@ -131,13 +241,79 @@ classes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
+ range: IntervalSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ IntervalSeries__data:
+ name: IntervalSeries__data
+ description: Use values >0 if interval started, <0 if interval ended.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: int8
- required: true
- multivalued: false
- tree_root: true
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -153,14 +329,12 @@ classes:
description: Data decomposed into frequency bands.
range: DecompositionSeries__data
required: true
- multivalued: false
inlined: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
range: text
required: true
- multivalued: false
source_channels:
name: source_channels
annotations:
@@ -173,8 +347,6 @@ classes:
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
bands:
name: bands
@@ -182,7 +354,6 @@ classes:
from. There should be one row in this table for each band.
range: DecompositionSeries__bands
required: true
- multivalued: false
inlined: true
inlined_as_list: true
source_timeseries:
@@ -191,8 +362,6 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: TimeSeries
@@ -209,6 +378,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+          in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -289,63 +503,13 @@ classes:
identifier: true
range: string
required: true
- spike_times_index:
- name: spike_times_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the spike_times dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- spike_times:
- name: spike_times
- description: Spike times for each unit.
- range: Units__spike_times
- required: false
- multivalued: false
- inlined: true
- obs_intervals_index:
- name: obs_intervals_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the obs_intervals dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- obs_intervals:
- name: obs_intervals
- description: Observation intervals for each unit.
+ electrode_group:
+ name: electrode_group
+ description: Electrode group that each spike unit came from.
array:
- dimensions:
- - alias: num_intervals
- - alias: start_end
- exact_cardinality: 2
- range: float64
- required: false
- multivalued: false
- electrodes_index:
- name: electrodes_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into electrodes.
- range: VectorIndex
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: ElectrodeGroup
required: false
multivalued: false
inlined: true
@@ -360,51 +524,69 @@ classes:
value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
- electrode_group:
- name: electrode_group
- description: Electrode group that each spike unit came from.
+ electrodes_index:
+ name: electrodes_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into electrodes.
+ range: VectorIndex
+ inlined: true
+ obs_intervals:
+ name: obs_intervals
+ description: Observation intervals for each unit.
array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: ElectrodeGroup
+ dimensions:
+ - alias: num_intervals
+ - alias: start_end
+ exact_cardinality: 2
+ range: float64
required: false
multivalued: false
+ obs_intervals_index:
+ name: obs_intervals_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the obs_intervals dataset.
+ range: VectorIndex
+ inlined: true
+ spike_times:
+ name: spike_times
+ description: Spike times for each unit.
+ range: Units__spike_times
+ inlined: true
+ spike_times_index:
+ name: spike_times_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the spike_times dataset.
+ range: VectorIndex
inlined: true
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_mean
+ inlined: true
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_sd
+ inlined: true
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
@@ -430,13 +612,8 @@ classes:
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
- array:
- dimensions:
- - alias: num_waveforms
- - alias: num_samples
- range: numeric
- required: false
- multivalued: false
+ range: Units__waveforms
+ inlined: true
waveforms_index:
name: waveforms_index
annotations:
@@ -449,8 +626,6 @@ classes:
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
waveforms_index_index:
name: waveforms_index_index
@@ -464,8 +639,6 @@ classes:
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
Units__spike_times:
@@ -489,3 +662,97 @@ classes:
for the spike time to be between samples.
range: float64
required: false
+ Units__waveform_mean:
+ name: Units__waveform_mean
+ description: Spike waveform mean for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_mean)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_mean
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveform_sd:
+ name: Units__waveform_sd
+ description: Spike waveform standard deviation for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_sd)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_sd
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveforms:
+ name: Units__waveforms
+ description: Individual waveforms for each spike on each electrode. This is a
+ doubly indexed column. The 'waveforms_index' column indexes which waveforms
+ in this column belong to the same spike event for a given unit, where each waveform
+ was recorded from a different electrode. The 'waveforms_index_index' column
+ indexes the 'waveforms_index' column to indicate which spike events belong to
+ a given unit. For example, if the 'waveforms_index_index' column has values
+ [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
+ to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
+ column correspond to the 3 spike events of the second unit, and the next 1 element
+ of the 'waveforms_index' column corresponds to the 1 spike event of the third
+ unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
+ the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
+ that were recorded from 3 different electrodes for the first spike time of the
+ first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
+ for a graphical representation of this example. When there is only one electrode
+ for each unit (i.e., each spike time is associated with a single waveform),
+ then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
+ the number of spike events. The number of electrodes for each spike event should
+ be the same within a given unit. The 'electrodes' column should be used to indicate
+ which electrodes are associated with each unit, and the order of the waveforms
+ within a given unit x spike event should be in the same order as the electrodes
+ referenced in the 'electrodes' column of this table. The number of samples for
+ each waveform must be the same.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveforms)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveforms
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
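
The `Units__waveforms` description above specifies NWB's doubly-ragged layout in prose; the lookup is easier to follow in code. A minimal sketch, assuming (per standard `VectorIndex` semantics and the worked example in the description) that both index columns hold cumulative end offsets; `spike_event_slices` is an illustrative name, not part of any generated API:

```python
def spike_event_slices(waveforms_index, waveforms_index_index, unit):
    # waveforms_index_index[u] is the exclusive end offset of unit u's
    # entries in waveforms_index; waveforms_index[i] is the exclusive end
    # offset of spike event i's rows in the waveforms array.
    ev_start = waveforms_index_index[unit - 1] if unit > 0 else 0
    events = []
    for i in range(ev_start, waveforms_index_index[unit]):
        row_start = waveforms_index[i - 1] if i > 0 else 0
        events.append(slice(row_start, waveforms_index[i]))
    return events

# Worked example from the description: waveforms_index_index = [2, 5, 6] and
# waveforms_index = [3, 6, 8, 10, 12, 13] give the first unit two spike
# events; its first event covers waveforms rows 0:3.
print(spike_event_slices([3, 6, 8, 10, 12, 13], [2, 5, 6], 0))
```
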
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
index 8c6b076..99b02f7 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
@@ -27,12 +27,9 @@ classes:
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
- array:
- dimensions:
- - alias: num_times
- range: numeric
+ range: OptogeneticSeries__data
required: true
- multivalued: false
+ inlined: true
site:
name: site
annotations:
@@ -40,12 +37,80 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: OptogeneticStimulusSite
- range: string
tree_root: true
+ OptogeneticSeries__data:
+ name: OptogeneticSeries__data
+ description: Applied power for optogenetic stimulus, in watts.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for data, which is fixed to 'watts'.
+ ifabsent: string(watts)
+ range: text
+ required: true
+ equals_string: watts
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
@@ -61,13 +126,11 @@ classes:
description: Description of stimulation site.
range: text
required: true
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
@@ -75,7 +138,6 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
device:
name: device
annotations:
@@ -83,7 +145,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
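
The `conversion`, `offset`, and `resolution` attributes that recur across these `*__data` classes describe a single affine decode step. A sketch using the int16 example from the `conversion` description itself; variable names are illustrative:

```python
import numpy as np

# Per the 'conversion' description: int16 samples covering a 5 V span
# (-2.5 V to 2.5 V) behind an 8000x gain give this multiplier.
conversion = 2.5 / 32768 / 8000   # ~9.5367e-9 volts per raw count
offset = 0.0                      # no post-scaling shift in this example

raw = np.array([-32768, 0, 32767], dtype=np.int16)
volts = raw.astype(np.float64) * conversion + offset  # data in the declared 'unit'
```
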
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
index 17bb442..2147878 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
@@ -60,7 +60,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -80,17 +79,9 @@ classes:
data:
name: data
description: Signals from ROIs.
- range: numeric
+ range: RoiResponseSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_rois
+ inlined: true
rois:
name: rois
annotations:
@@ -104,9 +95,82 @@ classes:
on the ROIs stored in this timeseries.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
+ RoiResponseSeries__data:
+ name: RoiResponseSeries__data
+ description: Signals from ROIs.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_rois
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@@ -114,12 +178,19 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(DfOverF)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
Fluorescence:
name: Fluorescence
@@ -128,12 +199,19 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(Fluorescence)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
ImageSegmentation:
name: ImageSegmentation
@@ -146,12 +224,19 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: PlaneSegmentation
+ name:
+ name: name
+ ifabsent: string(ImageSegmentation)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: PlaneSegmentation
tree_root: true
PlaneSegmentation:
name: PlaneSegmentation
@@ -182,6 +267,13 @@ classes:
- alias: num_x
- alias: num_y
- alias: num_z
+ pixel_mask:
+ name: pixel_mask
+ description: 'Pixel masks for each ROI: a list of indices and weights for
+ the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ by the PlaneSegmentation'
+ range: PlaneSegmentation__pixel_mask
+ inlined: true
pixel_mask_index:
name: pixel_mask_index
annotations:
@@ -193,17 +285,13 @@ classes:
value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
- pixel_mask:
- name: pixel_mask
- description: 'Pixel masks for each ROI: a list of indices and weights for
- the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ voxel_mask:
+ name: voxel_mask
+ description: 'Voxel masks for each ROI: a list of indices and weights for
+ the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
- range: PlaneSegmentation__pixel_mask
- required: false
- multivalued: false
+ range: PlaneSegmentation__voxel_mask
inlined: true
voxel_mask_index:
name: voxel_mask_index
@@ -216,17 +304,6 @@ classes:
value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- voxel_mask:
- name: voxel_mask
- description: 'Voxel masks for each ROI: a list of indices and weights for
- the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
- by the PlaneSegmentation'
- range: PlaneSegmentation__voxel_mask
- required: false
- multivalued: false
inlined: true
reference_images:
name: reference_images
@@ -243,7 +320,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -269,24 +345,18 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Pixel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the pixel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
PlaneSegmentation__voxel_mask:
name: PlaneSegmentation__voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
@@ -307,32 +377,24 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Voxel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
z:
name: z
description: Voxel z-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the voxel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@@ -347,27 +409,21 @@ classes:
name: description
description: Description of the imaging plane.
range: text
- required: false
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
range: float32
- required: false
- multivalued: false
indicator:
name: indicator
description: Calcium indicator.
range: text
required: true
- multivalued: false
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
@@ -375,15 +431,12 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
range: ImagingPlane__manifold
- required: false
- multivalued: false
inlined: true
origin_coords:
name: origin_coords
@@ -391,8 +444,6 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
range: ImagingPlane__origin_coords
- required: false
- multivalued: false
inlined: true
grid_spacing:
name: grid_spacing
@@ -400,8 +451,6 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
range: ImagingPlane__grid_spacing
- required: false
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
@@ -423,8 +472,6 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
range: text
- required: false
- multivalued: false
optical_channel:
name: optical_channel
description: An optical channel used to record from an imaging plane.
@@ -440,7 +487,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -573,13 +619,11 @@ classes:
description: Description or other notes about the channel.
range: text
required: true
- multivalued: false
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
range: float32
required: true
- multivalued: false
tree_root: true
MotionCorrection:
name: MotionCorrection
@@ -588,12 +632,19 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: CorrectedImageStack
+ name:
+ name: name
+ ifabsent: string(MotionCorrection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: CorrectedImageStack
tree_root: true
CorrectedImageStack:
name: CorrectedImageStack
@@ -610,7 +661,6 @@ classes:
description: Image stack with frames shifted to the common coordinates.
range: ImageSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
xy_translation:
@@ -619,7 +669,6 @@ classes:
coordinates, for example, to align each frame to a reference image.
range: TimeSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
original:
@@ -629,7 +678,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
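
`pixel_mask` and `voxel_mask` above are concatenated ragged columns, with `pixel_mask_index`/`voxel_mask_index` delimiting each ROI's run. A minimal sketch of the per-ROI lookup, assuming the index column stores cumulative end offsets (standard `VectorIndex` semantics); the helper name is hypothetical:

```python
def rows_for_roi(mask, mask_index, roi):
    # mask_index[r] is the exclusive end offset of ROI r's rows in mask,
    # so ROI r owns mask[start:mask_index[r]].
    start = mask_index[roi - 1] if roi > 0 else 0
    return mask[start:mask_index[roi]]

# e.g. mask_index = [4, 9]: ROI 0 owns rows 0:4, ROI 1 owns rows 4:9.
```
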
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
index 26b6ed6..d3d25ee 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
@@ -37,30 +37,24 @@ classes:
description: Phase response to stimulus on the first measured axis.
range: ImagingRetinotopy__axis_1_phase_map
required: true
- multivalued: false
inlined: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_1_power_map
- required: false
- multivalued: false
inlined: true
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
range: ImagingRetinotopy__axis_2_phase_map
required: true
- multivalued: false
inlined: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_2_power_map
- required: false
- multivalued: false
inlined: true
axis_descriptions:
name: axis_descriptions
@@ -79,16 +73,12 @@ classes:
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
range: ImagingRetinotopy__focal_depth_image
- required: false
- multivalued: false
inlined: true
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: ImagingRetinotopy__sign_map
- required: false
- multivalued: false
inlined: true
vasculature_image:
name: vasculature_image
@@ -96,7 +86,6 @@ classes:
[rows][columns]'
range: ImagingRetinotopy__vasculature_image
required: true
- multivalued: false
inlined: true
tree_root: true
ImagingRetinotopy__axis_1_phase_map:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
index 9aeec32..13fe72d 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
@@ -47,7 +47,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -56,7 +55,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
@@ -64,7 +62,6 @@ classes:
exact_number_dimensions: 1
range: TimeSeries
required: true
- multivalued: false
inlined: true
tree_root: true
Image:
@@ -189,7 +186,6 @@ classes:
external file.
range: TimeSeries__data
required: true
- multivalued: false
inlined: true
starting_time:
name: starting_time
@@ -197,8 +193,6 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
range: TimeSeries__starting_time
- required: false
- multivalued: false
inlined: true
timestamps:
name: timestamps
@@ -241,8 +235,6 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
range: TimeSeries__sync
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
tree_root: true
@@ -383,13 +375,24 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: NWBDataInterface
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of this collection of processed data.
+ range: text
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: NWBDataInterface
+ - range: DynamicTable
tree_root: true
Images:
name: Images
@@ -429,7 +432,5 @@ classes:
and only once, so the dataset should have the same length as the number
of images.
range: ImageReferences
- required: false
- multivalued: false
inlined: true
tree_root: true
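
The `starting_time` slot above covers the regularly sampled case: store one timestamp plus a rate and derive the rest. A sketch of that derivation (helper name is illustrative; NWB rates are in Hz):

```python
import numpy as np

def implied_timestamps(starting_time, rate, num_samples):
    # Sample i of a regularly sampled TimeSeries falls at
    # starting_time + i / rate, making explicit timestamps redundant.
    return starting_time + np.arange(num_samples) / rate
```
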
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
index 9d96389..123714b 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
@@ -38,14 +38,11 @@ classes:
reference frame.
range: SpatialSeries__data
required: true
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
range: text
- required: false
- multivalued: false
tree_root: true
SpatialSeries__data:
name: SpatialSeries__data
@@ -59,6 +56,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
@@ -106,12 +148,19 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: IntervalSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEpochs)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: IntervalSeries
tree_root: true
BehavioralEvents:
name: BehavioralEvents
@@ -119,12 +168,19 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEvents)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
BehavioralTimeSeries:
name: BehavioralTimeSeries
@@ -132,36 +188,57 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralTimeSeries)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(PupilTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(EyeTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
CompassDirection:
name: CompassDirection
@@ -172,22 +249,36 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(CompassDirection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(Position)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
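
The rewrite repeated through these hunks (an anonymous `- name: value` attribute list becoming explicit `name` and `value` slots, with `ifabsent` pinning the group name) maps naturally onto a dict-valued model field. A rough sketch of the kind of pydantic class this could generate; the field shapes here are assumptions, not the actual nwb-models output:

```python
from typing import Dict
from pydantic import BaseModel, Field

class SpatialSeries(BaseModel):  # stand-in for the real generated class
    name: str

class Position(BaseModel):
    # 'ifabsent: string(Position)' becomes a default; 'value' collects the
    # contained SpatialSeries objects keyed by name (inlined_as_list: false).
    name: str = "Position"
    value: Dict[str, SpatialSeries] = Field(default_factory=dict)
```
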
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
index 6fba341..0deca51 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
@@ -39,40 +39,6 @@ classes:
about the filter properties as possible.
range: text
required: false
- data:
- name: data
- description: Recorded voltage data.
- range: numeric
- required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - alias: num_samples
- electrodes:
- name: electrodes
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: DynamicTableRegion pointer to the electrodes that this time series
- was generated from.
- range: DynamicTableRegion
- required: true
- multivalued: false
- inlined: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
@@ -90,7 +56,109 @@ classes:
range: float32
required: false
multivalued: false
+ data:
+ name: data
+ description: Recorded voltage data.
+ range: ElectricalSeries__data
+ required: true
+ inlined: true
+ electrodes:
+ name: electrodes
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: DynamicTableRegion pointer to the electrodes that this time series
+ was generated from.
+ range: DynamicTableRegion
+ required: true
+ inlined: true
tree_root: true
+ ElectricalSeries__data:
+ name: ElectricalSeries__data
+ description: Recorded voltage data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This value
+ is fixed to 'volts'. Actual stored values are not necessarily stored in
+ these units. To access the data in these units, multiply 'data' by 'conversion',
+ followed by 'channel_conversion' (if present), and then add 'offset'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - alias: num_samples
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@@ -111,19 +179,9 @@ classes:
data:
name: data
description: Spike waveforms.
- range: numeric
+ range: SpikeEventSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_events
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_events
- - alias: num_channels
- - alias: num_samples
+ inlined: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
@@ -137,6 +195,82 @@ classes:
required: true
multivalued: false
tree_root: true
+ SpikeEventSeries__data:
+ name: SpikeEventSeries__data
+ description: Spike waveforms.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified by unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for waveforms, which is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_samples
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_channels
+ - alias: num_samples
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
@@ -192,7 +326,6 @@ classes:
was generated from.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
EventDetection:
@@ -212,7 +345,6 @@ classes:
or dV/dT threshold, as well as relevant values.
range: text
required: true
- multivalued: false
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
@@ -241,7 +373,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ElectricalSeries
@@ -254,12 +385,19 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpikeEventSeries
+ name:
+ name: name
+ ifabsent: string(EventWaveform)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpikeEventSeries
tree_root: true
FilteredEphys:
name: FilteredEphys
@@ -276,12 +414,19 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(FilteredEphys)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
LFP:
name: LFP
@@ -290,12 +435,19 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(LFP)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
ElectrodeGroup:
name: ElectrodeGroup
@@ -323,8 +475,6 @@ classes:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
- required: false
- multivalued: false
inlined: true
device:
name: device
@@ -333,7 +483,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -356,24 +505,18 @@ classes:
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
y:
name: y
description: y coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
z:
name: z
description: z coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@@ -395,7 +538,6 @@ classes:
description: Filtering applied to data before generating mean/sd
range: text
required: true
- multivalued: false
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
@@ -427,7 +569,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Clustering
@@ -451,7 +592,6 @@ classes:
clusters curated using Klusters, etc)
range: text
required: true
- multivalued: false
num:
name: num
description: Cluster number of each event
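
`ElectricalSeries__data.unit` above spells out the decode order: `conversion`, then per-channel `channel_conversion` if present, then `offset`. A sketch for the 2-D `(num_times, num_channels)` case; the helper is hypothetical:

```python
import numpy as np

def electrical_series_in_volts(data, conversion, channel_conversion=None, offset=0.0):
    # Order per the 'unit' description: scale by conversion, apply the
    # per-channel factor (broadcast over the trailing channel axis), add offset.
    out = np.asarray(data, dtype=np.float64) * conversion
    if channel_conversion is not None:
        out = out * np.asarray(channel_conversion)
    return out + offset
```
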
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
index 0a9685b..cbe9de3 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
@@ -63,8 +63,6 @@ classes:
value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
timeseries:
name: timeseries
@@ -77,8 +75,6 @@ classes:
value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
- required: false
- multivalued: false
inlined: true
timeseries_index:
name: timeseries_index
@@ -91,7 +87,5 @@ classes:
value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
index 481256f..85b1f65 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
@@ -81,13 +81,11 @@ classes:
other files.
range: text
required: true
- multivalued: false
session_description:
name: session_description
description: A description of the experimental session and data in the file.
range: text
required: true
- multivalued: false
session_start_time:
name: session_start_time
description: 'Date and time of the experiment/session start. The date is stored
@@ -96,7 +94,6 @@ classes:
offset. Date accuracy is up to milliseconds.'
range: isodatetime
required: true
- multivalued: false
timestamps_reference_time:
name: timestamps_reference_time
description: 'Date and time corresponding to time zero of all timestamps.
@@ -106,7 +103,6 @@ classes:
times stored in the file use this time as reference (i.e., time zero).'
range: isodatetime
required: true
- multivalued: false
acquisition:
name: acquisition
description: Data streams recorded from the system, including ephys, ophys,
@@ -185,7 +181,6 @@ classes:
can exist in the present file or can be linked to a remote library file.
range: NWBFile__stimulus
required: true
- multivalued: false
inlined: true
inlined_as_list: true
general:
@@ -207,7 +202,6 @@ classes:
should not be created unless there is data to store within them.
range: NWBFile__general
required: true
- multivalued: false
inlined: true
inlined_as_list: true
intervals:
@@ -217,18 +211,18 @@ classes:
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
range: NWBFile__intervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
units:
name: units
description: Data about sorted spike units.
range: Units
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
+ specifications:
+ name: specifications
+ description: Nested dictionary of schema specifications
+ range: dict
tree_root: true
NWBFile__stimulus:
name: NWBFile__stimulus
@@ -300,14 +294,10 @@ classes:
name: data_collection
description: Notes about data collection and analysis.
range: text
- required: false
- multivalued: false
experiment_description:
name: experiment_description
description: General description of the experiment.
range: text
- required: false
- multivalued: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
@@ -322,8 +312,6 @@ classes:
name: institution
description: Institution(s) where experiment was performed.
range: text
- required: false
- multivalued: false
keywords:
name: keywords
description: Terms to search over.
@@ -337,28 +325,20 @@ classes:
name: lab
description: Laboratory where experiment was performed.
range: text
- required: false
- multivalued: false
notes:
name: notes
description: Notes about the experiment.
range: text
- required: false
- multivalued: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
range: text
- required: false
- multivalued: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
range: text
- required: false
- multivalued: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
@@ -372,52 +352,31 @@ classes:
name: session_id
description: Lab-specific ID for the session.
range: text
- required: false
- multivalued: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
range: text
- required: false
- multivalued: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
range: general__source_script
- required: false
- multivalued: false
inlined: true
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
range: text
- required: false
- multivalued: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
range: text
- required: false
- multivalued: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
range: text
- required: false
- multivalued: false
- lab_meta_data:
- name: lab_meta_data
- description: Place-holder than can be extended so that lab-specific meta-data
- can be placed in /general.
- range: LabMetaData
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
@@ -432,24 +391,18 @@ classes:
description: Information about the animal or person from which the data was
measured.
range: Subject
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
range: general__extracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
range: general__intracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
optogenetics:
@@ -468,6 +421,14 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
+ value:
+ name: value
+ description: Place-holder that can be extended so that lab-specific meta-data
+ can be placed in /general.
+ range: LabMetaData
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
general__source_script:
name: general__source_script
description: Script file or link to public source code used to create this NWB
@@ -500,22 +461,19 @@ classes:
range: string
required: true
equals_string: extracellular_ephys
- electrode_group:
- name: electrode_group
- description: Physical group of electrodes.
- range: ElectrodeGroup
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
range: extracellular_ephys__electrodes
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
+ value:
+ name: value
+ description: Physical group of electrodes.
+ range: ElectrodeGroup
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
extracellular_ephys__electrodes:
name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
@@ -660,16 +618,6 @@ classes:
etc. If this changes between TimeSeries, filter description should be stored
as a text attribute for each TimeSeries.'
range: text
- required: false
- multivalued: false
- intracellular_electrode:
- name: intracellular_electrode
- description: An intracellular electrode.
- range: IntracellularElectrode
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
sweep_table:
name: sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries.
@@ -677,8 +625,6 @@ classes:
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
range: SweepTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
intracellular_recordings:
@@ -696,8 +642,6 @@ classes:
to an electrode is also common in intracellular electrophysiology, in which
case other TimeSeries may be used.
range: IntracellularRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
simultaneous_recordings:
@@ -706,8 +650,6 @@ classes:
the IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
range: SimultaneousRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
sequential_recordings:
@@ -717,8 +659,6 @@ classes:
together sequential recordings where a sequence of stimuli of the same
type with varying parameters have been presented in a sequence.
range: SequentialRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
repetitions:
@@ -728,8 +668,6 @@ classes:
type of stimulus, the RepetitionsTable table is typically used to group
sets of stimuli applied in sequence.
range: RepetitionsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
experimental_conditions:
@@ -737,8 +675,13 @@ classes:
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental conditions.
range: ExperimentalConditionsTable
- required: false
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ value:
+ name: value
+ description: An intracellular electrode.
+ range: IntracellularElectrode
+ multivalued: true
inlined: true
inlined_as_list: false
NWBFile__intervals:
@@ -759,32 +702,25 @@ classes:
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
- time_intervals:
- name: time_intervals
+ value:
+ name: value
description: Optional additional table(s) for describing other experimental
time intervals.
range: TimeIntervals
- required: false
multivalued: true
inlined: true
inlined_as_list: false
@@ -813,59 +749,41 @@ classes:
name: age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
range: Subject__age
- required: false
- multivalued: false
inlined: true
date_of_birth:
name: date_of_birth
description: Date of birth of subject. Can be supplied instead of 'age'.
range: isodatetime
- required: false
- multivalued: false
description:
name: description
description: Description of subject and where subject came from (e.g., breeder,
if animal).
range: text
- required: false
- multivalued: false
genotype:
name: genotype
description: Genetic strain. If absent, assume Wild Type (WT).
range: text
- required: false
- multivalued: false
sex:
name: sex
description: Gender of subject.
range: text
- required: false
- multivalued: false
species:
name: species
description: Species of subject.
range: text
- required: false
- multivalued: false
strain:
name: strain
description: Strain of subject.
range: text
- required: false
- multivalued: false
subject_id:
name: subject_id
description: ID of animal/person used/participating in experiment (lab convention).
range: text
- required: false
- multivalued: false
weight:
name: weight
description: Weight at time of experiment, at time of surgery and at other
important times.
range: text
- required: false
- multivalued: false
tree_root: true
Subject__age:
name: Subject__age
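A note on the pattern repeated throughout these hunks: dropping `required: false` and `multivalued: false` is cosmetic, because both are what LinkML assumes when the keys are absent. A quick check with `linkml_runtime` (the import is real; treating the unset `None` as false is the assumption every generator makes):

```python
from linkml_runtime.linkml_model.meta import SlotDefinition

explicit = SlotDefinition(name="age", required=False, multivalued=False)
implicit = SlotDefinition(name="age")

# Unset booleans come back as None, which consumers treat as false, so
# deleting the explicit keys does not change the generated models.
assert not explicit.required and not implicit.required
assert not explicit.multivalued and not implicit.multivalued
```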
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
index 140e8c8..95b2598 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
@@ -41,15 +41,12 @@ classes:
description: Recorded voltage or current.
range: PatchClampSeries__data
required: true
- multivalued: false
inlined: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
range: float32
- required: false
- multivalued: false
electrode:
name: electrode
annotations:
@@ -57,7 +54,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: IntracellularElectrode
@@ -74,6 +70,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
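The `conversion`/`offset`/`resolution` attributes added to the inlined `data` classes encode the usual TimeSeries scaling contract: value in `unit` = raw * `conversion` + `offset`. Replaying the int16 example from the description as a sanity check (numbers taken directly from the text):

```python
# 5 V ADC range over the int16 range, through an 8000x amplifier
conversion = 2.5 / 32768 / 8000
print(f"{conversion:.4e}")  # 9.5367e-09, matching the description

def to_unit(raw: float, conversion: float = 1.0, offset: float = 0.0) -> float:
    """Apply the documented recipe: multiply by 'conversion', then add 'offset'."""
    return raw * conversion + offset

# A full-scale int16 sample maps back through the gain to ~3.125e-4 volts.
print(to_unit(32767, conversion))
```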
@@ -99,31 +140,24 @@ classes:
identifier: true
range: string
required: true
+ bias_current:
+ name: bias_current
+ description: Bias current, in amps.
+ range: float32
+ bridge_balance:
+ name: bridge_balance
+ description: Bridge balance, in ohms.
+ range: float32
+ capacitance_compensation:
+ name: capacitance_compensation
+ description: Capacitance compensation, in farads.
+ range: float32
data:
name: data
description: Recorded voltage.
range: CurrentClampSeries__data
required: true
- multivalued: false
inlined: true
- bias_current:
- name: bias_current
- description: Bias current, in amps.
- range: float32
- required: false
- multivalued: false
- bridge_balance:
- name: bridge_balance
- description: Bridge balance, in ohms.
- range: float32
- required: false
- multivalued: false
- capacitance_compensation:
- name: capacitance_compensation
- description: Capacitance compensation, in farads.
- range: float32
- required: false
- multivalued: false
tree_root: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
@@ -136,6 +170,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
        description: Base unit of measurement for working with the data, which is
@@ -148,8 +227,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
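Replacing `range: AnyType` with an explicit one-dimensional `num_times` array is what allows a shaped annotation instead of `Any` in the generated models. Assuming the numpydantic-style rendering this project uses for array-typed slots (a sketch, not the actual generated class):

```python
import numpy as np
from pydantic import BaseModel
from numpydantic import NDArray, Shape

class CurrentClampSeriesDataSketch(BaseModel):
    # one-dimensional series over time; 'num_times' is the dimension alias
    value: NDArray[Shape["* num_times"], float]

ok = CurrentClampSeriesDataSketch(value=np.zeros(100))   # validates
# CurrentClampSeriesDataSketch(value=np.zeros((2, 2)))   # wrong shape, rejected
```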
@@ -176,19 +257,16 @@ classes:
description: Bias current, in amps, fixed to 0.0.
range: float32
required: true
- multivalued: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
range: float32
required: true
- multivalued: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
range: float32
required: true
- multivalued: false
tree_root: true
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
@@ -205,7 +283,6 @@ classes:
description: Stimulus current applied.
range: CurrentClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
CurrentClampStimulusSeries__data:
@@ -219,6 +296,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
        description: Base unit of measurement for working with the data, which is
@@ -231,8 +353,10 @@ classes:
equals_string: amperes
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@@ -245,88 +369,48 @@ classes:
identifier: true
range: string
required: true
- data:
- name: data
- description: Recorded current.
- range: VoltageClampSeries__data
- required: true
- multivalued: false
- inlined: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
range: VoltageClampSeries__capacitance_fast
- required: false
- multivalued: false
inlined: true
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
range: VoltageClampSeries__capacitance_slow
- required: false
- multivalued: false
+ inlined: true
+ data:
+ name: data
+ description: Recorded current.
+ range: VoltageClampSeries__data
+ required: true
inlined: true
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
range: VoltageClampSeries__resistance_comp_bandwidth
- required: false
- multivalued: false
inlined: true
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
range: VoltageClampSeries__resistance_comp_correction
- required: false
- multivalued: false
inlined: true
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
range: VoltageClampSeries__resistance_comp_prediction
- required: false
- multivalued: false
inlined: true
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
range: VoltageClampSeries__whole_cell_capacitance_comp
- required: false
- multivalued: false
inlined: true
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
range: VoltageClampSeries__whole_cell_series_resistance_comp
- required: false
- multivalued: false
inlined: true
tree_root: true
- VoltageClampSeries__data:
- name: VoltageClampSeries__data
- description: Recorded current.
- attributes:
- name:
- name: name
- ifabsent: string(data)
- identifier: true
- range: string
- required: true
- equals_string: data
- unit:
- name: unit
- description: Base unit of measurement for working with the data. which is
- fixed to 'amperes'. Actual stored values are not necessarily stored in these
- units. To access the data in these units, multiply 'data' by 'conversion'
- and add 'offset'.
- ifabsent: string(amperes)
- range: text
- required: true
- equals_string: amperes
- value:
- name: value
- range: AnyType
- required: true
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
@@ -371,6 +455,78 @@ classes:
name: value
range: float32
required: true
+ VoltageClampSeries__data:
+ name: VoltageClampSeries__data
+ description: Recorded current.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+        description: Base unit of measurement for working with the data, which is
+ fixed to 'amperes'. Actual stored values are not necessarily stored in these
+ units. To access the data in these units, multiply 'data' by 'conversion'
+ and add 'offset'.
+ ifabsent: string(amperes)
+ range: text
+ required: true
+ equals_string: amperes
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
@@ -501,7 +657,6 @@ classes:
description: Stimulus voltage applied.
range: VoltageClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
VoltageClampStimulusSeries__data:
@@ -515,6 +670,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
        description: Base unit of measurement for working with the data, which is
@@ -527,8 +727,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@@ -543,52 +745,37 @@ classes:
name: cell_id
description: unique ID of the cell
range: text
- required: false
- multivalued: false
description:
name: description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
range: text
required: true
- multivalued: false
filtering:
name: filtering
description: Electrode specific filtering.
range: text
- required: false
- multivalued: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
range: text
- required: false
- multivalued: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
range: text
- required: false
- multivalued: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
range: text
- required: false
- multivalued: false
seal:
name: seal
description: Information about seal used for recording.
range: text
- required: false
- multivalued: false
slice:
name: slice
description: Information about slice used for recording.
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
@@ -596,7 +783,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -615,15 +801,6 @@ classes:
identifier: true
range: string
required: true
- sweep_number:
- name: sweep_number
- description: Sweep number of the PatchClampSeries in that row.
- array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: uint32
- required: true
- multivalued: false
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
@@ -646,8 +823,16 @@ classes:
description: Index for series.
range: VectorIndex
required: true
- multivalued: false
inlined: true
+ sweep_number:
+ name: sweep_number
+ description: Sweep number of the PatchClampSeries in that row.
+ array:
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: uint32
+ required: true
+ multivalued: false
tree_root: true
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
@@ -707,7 +892,6 @@ classes:
recording (rows).
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularResponsesTable:
@@ -740,7 +924,6 @@ classes:
recording (rows)
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularRecordingsTable:
@@ -782,15 +965,6 @@ classes:
description: Table for storing intracellular electrode related metadata.
range: IntracellularElectrodesTable
required: true
- multivalued: false
- inlined: true
- inlined_as_list: false
- stimuli:
- name: stimuli
- description: Table for storing intracellular stimulus related metadata.
- range: IntracellularStimuliTable
- required: true
- multivalued: false
inlined: true
inlined_as_list: false
responses:
@@ -798,7 +972,13 @@ classes:
description: Table for storing intracellular response related metadata.
range: IntracellularResponsesTable
required: true
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ stimuli:
+ name: stimuli
+ description: Table for storing intracellular stimulus related metadata.
+ range: IntracellularStimuliTable
+ required: true
inlined: true
inlined_as_list: false
tree_root: true
@@ -822,7 +1002,6 @@ classes:
table.
range: SimultaneousRecordingsTable__recordings
required: true
- multivalued: false
inlined: true
recordings_index:
name: recordings_index
@@ -836,7 +1015,6 @@ classes:
description: Index dataset for the recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
SimultaneousRecordingsTable__recordings:
@@ -881,7 +1059,6 @@ classes:
table.
range: SequentialRecordingsTable__simultaneous_recordings
required: true
- multivalued: false
inlined: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
@@ -895,7 +1072,6 @@ classes:
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
stimulus_type:
name: stimulus_type
@@ -949,7 +1125,6 @@ classes:
table.
range: RepetitionsTable__sequential_recordings
required: true
- multivalued: false
inlined: true
sequential_recordings_index:
name: sequential_recordings_index
@@ -963,7 +1138,6 @@ classes:
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
RepetitionsTable__sequential_recordings:
@@ -1005,7 +1179,6 @@ classes:
description: A reference to one or more rows in the RepetitionsTable table.
range: ExperimentalConditionsTable__repetitions
required: true
- multivalued: false
inlined: true
repetitions_index:
name: repetitions_index
@@ -1019,7 +1192,6 @@ classes:
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
ExperimentalConditionsTable__repetitions:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
index 4406284..4da87da 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
@@ -91,21 +91,9 @@ classes:
name: data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
- range: numeric
+ range: ImageSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - alias: z
+ inlined: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
@@ -123,8 +111,6 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
range: ImageSeries__external_file
- required: false
- multivalued: false
inlined: true
format:
name: format
@@ -132,22 +118,98 @@ classes:
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
+ ifabsent: string(raw)
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Device
- range: string
tree_root: true
+ ImageSeries__data:
+ name: ImageSeries__data
+ description: Binary data representing images across frames. If data are stored
+ in an external file, this should be an empty 3D array.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - alias: z
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
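`ifabsent: string(raw)` gives `format` a declared default, matching the prose ("if this attribute is not present, then the default format='raw' case is assumed"). In a generated pydantic model that plausibly becomes a plain field default; a hedged sketch:

```python
from typing import Optional
from pydantic import BaseModel, Field

class ImageSeriesSketch(BaseModel):
    # ifabsent: string(raw)  ->  concrete default on the field
    format: Optional[str] = Field(
        default="raw",
        description="'external' means data are paths to image files",
    )

assert ImageSeriesSketch().format == "raw"
```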
@@ -206,7 +268,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -226,12 +287,16 @@ classes:
identifier: true
range: string
required: true
+ data:
+ name: data
+ description: Images presented to subject, either grayscale or RGB
+ range: OpticalSeries__data
+ required: true
+ inlined: true
distance:
name: distance
description: Distance from camera/monitor to target/eye.
range: float32
- required: false
- multivalued: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -247,12 +312,78 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
- data:
- name: data
- description: Images presented to subject, either grayscale or RGB
- range: numeric
+ orientation:
+ name: orientation
+ description: Description of image relative to some reference frame (e.g.,
+ which way is up). Must also specify frame of reference.
+ range: text
+ tree_root: true
+ OpticalSeries__data:
+ name: OpticalSeries__data
+ description: Images presented to subject, either grayscale or RGB
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
required: true
- multivalued: false
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -266,14 +397,6 @@ classes:
- alias: y
- alias: r_g_b
exact_cardinality: 3
- orientation:
- name: orientation
- description: Description of image relative to some reference frame (e.g.,
- which way is up). Must also specify frame of reference.
- range: text
- required: false
- multivalued: false
- tree_root: true
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
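The relocated `OpticalSeries` data keeps its two legal layouts as an `any_of` over array specs: (frame, x, y) for grayscale and (frame, x, y, r_g_b) with exactly three color channels. In numpydantic terms that maps naturally onto a `Union` of shaped annotations; a sketch under that assumption:

```python
from typing import Union
import numpy as np
from pydantic import BaseModel
from numpydantic import NDArray, Shape

class OpticalSeriesDataSketch(BaseModel):
    # any_of: grayscale (frame, x, y) or RGB (frame, x, y, 3)
    value: Union[
        NDArray[Shape["* frame, * x, * y"], float],
        NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
    ]

gray = OpticalSeriesDataSketch(value=np.zeros((10, 64, 64)))
rgb = OpticalSeriesDataSketch(value=np.zeros((10, 64, 64, 3)))
```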
@@ -294,20 +417,15 @@ classes:
name: data
description: Index of the image (using zero-indexing) in the linked Images
object.
- array:
- dimensions:
- - alias: num_times
- range: uint32
+ range: IndexSeries__data
required: true
- multivalued: false
+ inlined: true
indexed_timeseries:
name: indexed_timeseries
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -318,10 +436,62 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Images
- range: string
tree_root: true
+ IndexSeries__data:
+ name: IndexSeries__data
+ description: Index of the image (using zero-indexing) in the linked Images object.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: This field is unused by IndexSeries.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: This field is unused by IndexSeries.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: This field is unused by IndexSeries.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: This field is unused by IndexSeries and has the value N/A.
+ ifabsent: string(N/A)
+ range: text
+ required: true
+ equals_string: N/A
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: uint32
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
index e42c742..e36f824 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
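The two `repr` additions in core.nwb.language.yaml only change how types are spelled in generated Python: `numeric` is annotated as `float | int` rather than bare `float`, and the new `dict` type (no `typeof`, only a `repr`) renders as the builtin `dict`. A small sketch of the visible effect on annotations, assuming the standard pydantic generator behavior:

```python
from typing import Optional, get_type_hints
from pydantic import BaseModel

class ReprSketch(BaseModel):
    # `numeric` with repr "float | int" instead of plain float
    resolution: Optional[float | int] = None
    # the new `dict` type annotates as the builtin dict
    extras: Optional[dict] = None

# Union[float, int, None]: integer values are no longer coerced to float
print(get_type_hints(ReprSketch)["resolution"])
```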
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
index ced8985..021044b 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
@@ -38,7 +38,6 @@ classes:
description: Values of each feature at each time.
range: AbstractFeatureSeries__data
required: true
- multivalued: false
inlined: true
feature_units:
name: feature_units
@@ -70,6 +69,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Since there can be different units for different features, store
@@ -105,13 +149,79 @@ classes:
data:
name: data
description: Annotations made during an experiment.
+ range: AnnotationSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ AnnotationSeries__data:
+ name: AnnotationSeries__data
+ description: Annotations made during an experiment.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: text
- required: true
- multivalued: false
- tree_root: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
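For `AnnotationSeries__data` the scaling attributes are pinned rather than free: `unit` must equal 'n/a' and `resolution` must equal -1.0, with matching `ifabsent` defaults. `equals_string` maps cleanly onto a `Literal` annotation and `equals_number` onto a validated default; a sketch of that mapping, not the generated code itself:

```python
from typing import Literal
from pydantic import BaseModel, field_validator

class AnnotationSeriesDataSketch(BaseModel):
    # equals_string: n/a  +  ifabsent: string(n/a)
    unit: Literal["n/a"] = "n/a"
    # equals_number: -1  +  ifabsent: float(-1.0)
    resolution: float = -1.0

    @field_validator("resolution")
    @classmethod
    def _fixed(cls, v: float) -> float:
        if v != -1.0:
            raise ValueError("resolution is fixed to -1.0 for annotations")
        return v

AnnotationSeriesDataSketch()               # defaults satisfy both constraints
# AnnotationSeriesDataSketch(resolution=0) # raises a ValidationError
```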
@@ -131,13 +241,79 @@ classes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
+ range: IntervalSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ IntervalSeries__data:
+ name: IntervalSeries__data
+ description: Use values >0 if interval started, <0 if interval ended.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: int8
- required: true
- multivalued: false
- tree_root: true
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -153,14 +329,12 @@ classes:
description: Data decomposed into frequency bands.
range: DecompositionSeries__data
required: true
- multivalued: false
inlined: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
range: text
required: true
- multivalued: false
source_channels:
name: source_channels
annotations:
@@ -173,8 +347,6 @@ classes:
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
bands:
name: bands
@@ -182,7 +354,6 @@ classes:
from. There should be one row in this table for each band.
range: DecompositionSeries__bands
required: true
- multivalued: false
inlined: true
inlined_as_list: true
source_timeseries:
@@ -191,8 +362,6 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: TimeSeries
@@ -209,6 +378,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -289,63 +503,13 @@ classes:
identifier: true
range: string
required: true
- spike_times_index:
- name: spike_times_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the spike_times dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- spike_times:
- name: spike_times
- description: Spike times for each unit in seconds.
- range: Units__spike_times
- required: false
- multivalued: false
- inlined: true
- obs_intervals_index:
- name: obs_intervals_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the obs_intervals dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- obs_intervals:
- name: obs_intervals
- description: Observation intervals for each unit.
+ electrode_group:
+ name: electrode_group
+ description: Electrode group that each spike unit came from.
array:
- dimensions:
- - alias: num_intervals
- - alias: start_end
- exact_cardinality: 2
- range: float64
- required: false
- multivalued: false
- electrodes_index:
- name: electrodes_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into electrodes.
- range: VectorIndex
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: ElectrodeGroup
required: false
multivalued: false
inlined: true
@@ -360,51 +524,69 @@ classes:
value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
- electrode_group:
- name: electrode_group
- description: Electrode group that each spike unit came from.
+ electrodes_index:
+ name: electrodes_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into electrodes.
+ range: VectorIndex
+ inlined: true
+ obs_intervals:
+ name: obs_intervals
+ description: Observation intervals for each unit.
array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: ElectrodeGroup
+ dimensions:
+ - alias: num_intervals
+ - alias: start_end
+ exact_cardinality: 2
+ range: float64
required: false
multivalued: false
+ obs_intervals_index:
+ name: obs_intervals_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the obs_intervals dataset.
+ range: VectorIndex
+ inlined: true
+ spike_times:
+ name: spike_times
+ description: Spike times for each unit in seconds.
+ range: Units__spike_times
+ inlined: true
+ spike_times_index:
+ name: spike_times_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the spike_times dataset.
+ range: VectorIndex
inlined: true
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_mean
+ inlined: true
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_sd
+ inlined: true
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
@@ -430,13 +612,8 @@ classes:
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
- array:
- dimensions:
- - alias: num_waveforms
- - alias: num_samples
- range: numeric
- required: false
- multivalued: false
+ range: Units__waveforms
+ inlined: true
waveforms_index:
name: waveforms_index
annotations:
@@ -449,8 +626,6 @@ classes:
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
waveforms_index_index:
name: waveforms_index_index
@@ -464,8 +639,6 @@ classes:
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
Units__spike_times:
@@ -489,3 +662,97 @@ classes:
for the spike time to be between samples.
range: float64
required: false
+ Units__waveform_mean:
+ name: Units__waveform_mean
+ description: Spike waveform mean for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_mean)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_mean
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveform_sd:
+ name: Units__waveform_sd
+ description: Spike waveform standard deviation for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_sd)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_sd
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveforms:
+ name: Units__waveforms
+ description: Individual waveforms for each spike on each electrode. This is a
+ doubly indexed column. The 'waveforms_index' column indexes which waveforms
+ in this column belong to the same spike event for a given unit, where each waveform
+ was recorded from a different electrode. The 'waveforms_index_index' column
+ indexes the 'waveforms_index' column to indicate which spike events belong to
+ a given unit. For example, if the 'waveforms_index_index' column has values
+ [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
+ to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
+ column correspond to the 3 spike events of the second unit, and the next 1 element
+ of the 'waveforms_index' column corresponds to the 1 spike event of the third
+ unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
+ the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
+ that were recorded from 3 different electrodes for the first spike time of the
+ first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
+ for a graphical representation of this example. When there is only one electrode
+ for each unit (i.e., each spike time is associated with a single waveform),
+ then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
+ the number of spike events. The number of electrodes for each spike event should
+ be the same within a given unit. The 'electrodes' column should be used to indicate
+ which electrodes are associated with each unit, and the order of the waveforms
+ within a given unit x spike event should be in the same order as the electrodes
+ referenced in the 'electrodes' column of this table. The number of samples for
+ each waveform must be the same.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveforms)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveforms
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
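
The 'waveforms' description above walks through the doubly-ragged indexing scheme in prose. Here is a minimal Python sketch of how those two index columns could be resolved, using the example values from the description (plain lists stand in for the actual columns; this is not the generated model API):

```python
# Resolve the doubly-indexed 'waveforms' column described above.
# waveforms_index_index marks where each unit's spike events end in
# waveforms_index; waveforms_index marks where each spike event's
# per-electrode waveforms end in the waveforms table.

def split_ragged(rows, index):
    """Split `rows` at the cumulative boundaries listed in `index`."""
    out, start = [], 0
    for end in index:
        out.append(rows[start:end])
        start = end
    return out

waveforms = [[0.0] * 4 for _ in range(13)]   # 13 waveforms, 4 samples each
waveforms_index = [3, 6, 8, 10, 12, 13]      # spike event -> waveform rows
waveforms_index_index = [2, 5, 6]            # unit -> spike events

events = split_ragged(waveforms, waveforms_index)      # 6 spike events
units = split_ragged(events, waveforms_index_index)    # 3 units: 2, 3, 1 events

# First spike of the first unit was recorded on 3 electrodes,
# matching the worked example in the description.
assert len(units[0][0]) == 3
```
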
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
index b485822..aa9b528 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
@@ -27,12 +27,9 @@ classes:
data:
name: data
description: Applied power for optogenetic stimulus, in watts.
- array:
- dimensions:
- - alias: num_times
- range: numeric
+ range: OptogeneticSeries__data
required: true
- multivalued: false
+ inlined: true
site:
name: site
annotations:
@@ -40,12 +37,80 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: OptogeneticStimulusSite
- range: string
tree_root: true
+ OptogeneticSeries__data:
+ name: OptogeneticSeries__data
+ description: Applied power for optogenetic stimulus, in watts.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for data, which is fixed to 'watts'.
+ ifabsent: string(watts)
+ range: text
+ required: true
+ equals_string: watts
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
@@ -61,13 +126,11 @@ classes:
description: Description of stimulation site.
range: text
required: true
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
@@ -75,7 +138,6 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
device:
name: device
annotations:
@@ -83,7 +145,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
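
The repeated 'conversion'/'offset' attributes above define a linear map from stored values to the declared unit. A quick sketch of the worked example embedded in the 'conversion' description (sample values are illustrative):

```python
# The 16-bit ADC example from the 'conversion' description: a signed int16
# span (-32768..32767) covering -2.5..2.5 V behind an 8000x gain stage.
conversion = 2.5 / 32768 / 8000          # volts per raw count
offset = 0.0                             # no re-centering needed here

raw_counts = [-32768, 0, 32767]          # stored samples
volts = [c * conversion + offset for c in raw_counts]

print(f"{conversion:.4e}")               # 9.5367e-09, as stated above
print(volts)                             # ~[-3.125e-04, 0.0, 3.1249e-04]
```
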
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
index 3da9ec5..d608587 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
@@ -26,11 +26,31 @@ classes:
identifier: true
range: string
required: true
+ binning:
+ name: binning
+ description: Number of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
+ range: uint8
+ required: false
+ exposure_time:
+ name: exposure_time
+ description: Exposure time of the sample; often the inverse of the frequency.
+ range: float32
+ required: false
+ intensity:
+ name: intensity
+ description: Intensity of the excitation in mW/mm^2, if known.
+ range: float32
+ required: false
pmt_gain:
name: pmt_gain
description: Photomultiplier gain.
range: float32
required: false
+ power:
+ name: power
+ description: Power of the excitation in mW, if known.
+ range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
@@ -38,26 +58,6 @@ classes:
be stored w/ the actual data.
range: float32
required: false
- exposure_time:
- name: exposure_time
- description: Exposure time of the sample; often the inverse of the frequency.
- range: float32
- required: false
- binning:
- name: binning
- description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
- range: uint8
- required: false
- power:
- name: power
- description: Power of the excitation in mW, if known.
- range: float32
- required: false
- intensity:
- name: intensity
- description: Intensity of the excitation in mW/mm^2, if known.
- range: float32
- required: false
imaging_plane:
name: imaging_plane
annotations:
@@ -65,7 +65,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -115,7 +114,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -135,17 +133,9 @@ classes:
data:
name: data
description: Signals from ROIs.
- range: numeric
+ range: RoiResponseSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_rois
+ inlined: true
rois:
name: rois
annotations:
@@ -159,9 +149,82 @@ classes:
on the ROIs stored in this timeseries.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
+ RoiResponseSeries__data:
+ name: RoiResponseSeries__data
+ description: Signals from ROIs.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_rois
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@@ -169,12 +232,19 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(DfOverF)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
Fluorescence:
name: Fluorescence
@@ -183,12 +253,19 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(Fluorescence)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
ImageSegmentation:
name: ImageSegmentation
@@ -201,12 +278,19 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: PlaneSegmentation
+ name:
+ name: name
+ ifabsent: string(ImageSegmentation)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: PlaneSegmentation
tree_root: true
PlaneSegmentation:
name: PlaneSegmentation
@@ -237,6 +321,13 @@ classes:
- alias: num_x
- alias: num_y
- alias: num_z
+ pixel_mask:
+ name: pixel_mask
+ description: 'Pixel masks for each ROI: a list of indices and weights for
+ the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ by the PlaneSegmentation'
+ range: PlaneSegmentation__pixel_mask
+ inlined: true
pixel_mask_index:
name: pixel_mask_index
annotations:
@@ -248,17 +339,13 @@ classes:
value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
- pixel_mask:
- name: pixel_mask
- description: 'Pixel masks for each ROI: a list of indices and weights for
- the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ voxel_mask:
+ name: voxel_mask
+ description: 'Voxel masks for each ROI: a list of indices and weights for
+ the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
- range: PlaneSegmentation__pixel_mask
- required: false
- multivalued: false
+ range: PlaneSegmentation__voxel_mask
inlined: true
voxel_mask_index:
name: voxel_mask_index
@@ -271,17 +358,6 @@ classes:
value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- voxel_mask:
- name: voxel_mask
- description: 'Voxel masks for each ROI: a list of indices and weights for
- the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
- by the PlaneSegmentation'
- range: PlaneSegmentation__voxel_mask
- required: false
- multivalued: false
inlined: true
reference_images:
name: reference_images
@@ -298,7 +374,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -324,24 +399,18 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Pixel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the pixel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
PlaneSegmentation__voxel_mask:
name: PlaneSegmentation__voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
@@ -362,32 +431,24 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Voxel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
z:
name: z
description: Voxel z-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the voxel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@@ -402,27 +463,21 @@ classes:
name: description
description: Description of the imaging plane.
range: text
- required: false
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
range: float32
- required: false
- multivalued: false
indicator:
name: indicator
description: Calcium indicator.
range: text
required: true
- multivalued: false
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
@@ -430,15 +485,12 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
range: ImagingPlane__manifold
- required: false
- multivalued: false
inlined: true
origin_coords:
name: origin_coords
@@ -446,8 +498,6 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
range: ImagingPlane__origin_coords
- required: false
- multivalued: false
inlined: true
grid_spacing:
name: grid_spacing
@@ -455,8 +505,6 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
range: ImagingPlane__grid_spacing
- required: false
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
@@ -478,8 +526,6 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
range: text
- required: false
- multivalued: false
optical_channel:
name: optical_channel
description: An optical channel used to record from an imaging plane.
@@ -495,7 +541,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -628,13 +673,11 @@ classes:
description: Description or other notes about the channel.
range: text
required: true
- multivalued: false
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
range: float32
required: true
- multivalued: false
tree_root: true
MotionCorrection:
name: MotionCorrection
@@ -643,12 +686,19 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: CorrectedImageStack
+ name:
+ name: name
+ ifabsent: string(MotionCorrection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: CorrectedImageStack
tree_root: true
CorrectedImageStack:
name: CorrectedImageStack
@@ -665,7 +715,6 @@ classes:
description: Image stack with frames shifted to the common coordinates.
range: ImageSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
xy_translation:
@@ -674,7 +723,6 @@ classes:
coordinates, for example, to align each frame to a reference image.
range: TimeSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
original:
@@ -684,7 +732,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
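
These hunks replace inline 'data' arrays with compound classes (e.g. RoiResponseSeries__data) that keep the scaling attributes next to a 'value' slot holding the raw array. A rough pydantic sketch of the shape such a generated model might take; the field set and types are abridged and assumed, not the actual generated code:

```python
from typing import Literal, Optional

import numpy as np
from pydantic import BaseModel, ConfigDict


class RoiResponseSeriesData(BaseModel):
    """Assumed sketch of a compound 'data' dataset: the name is pinned,
    and scaling attributes ride alongside the array under 'value'."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    name: Literal["data"] = "data"       # equals_string: data
    conversion: float = 1.0              # ifabsent: float(1.0)
    offset: Optional[float] = None
    resolution: float = -1.0             # ifabsent: float(-1.0)
    unit: str                            # required free text
    continuity: Optional[str] = None
    value: Optional[np.ndarray] = None   # any_of: [num_times] or [num_times, num_rois]


d = RoiResponseSeriesData(unit="lumens", value=np.zeros((100, 5)))
print(d.conversion, d.value.shape)       # 1.0 (100, 5)
```
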
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
index c1fce82..33116cf 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
@@ -37,30 +37,24 @@ classes:
description: Phase response to stimulus on the first measured axis.
range: ImagingRetinotopy__axis_1_phase_map
required: true
- multivalued: false
inlined: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_1_power_map
- required: false
- multivalued: false
inlined: true
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
range: ImagingRetinotopy__axis_2_phase_map
required: true
- multivalued: false
inlined: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_2_power_map
- required: false
- multivalued: false
inlined: true
axis_descriptions:
name: axis_descriptions
@@ -79,16 +73,12 @@ classes:
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
range: ImagingRetinotopy__focal_depth_image
- required: false
- multivalued: false
inlined: true
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: ImagingRetinotopy__sign_map
- required: false
- multivalued: false
inlined: true
vasculature_image:
name: vasculature_image
@@ -96,7 +86,6 @@ classes:
[rows][columns]'
range: ImagingRetinotopy__vasculature_image
required: true
- multivalued: false
inlined: true
tree_root: true
ImagingRetinotopy__axis_1_phase_map:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml
index 7c3450a..e158341 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml
@@ -47,7 +47,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
count:
name: count
description: Number of data samples available in this time series, during
@@ -56,7 +55,6 @@ classes:
exact_number_dimensions: 1
range: int32
required: true
- multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
@@ -64,7 +62,6 @@ classes:
exact_number_dimensions: 1
range: TimeSeries
required: true
- multivalued: false
inlined: true
tree_root: true
Image:
@@ -189,7 +186,6 @@ classes:
external file.
range: TimeSeries__data
required: true
- multivalued: false
inlined: true
starting_time:
name: starting_time
@@ -197,8 +193,6 @@ classes:
uniformly spaced, the timestamp of the first sample can be specified and
all subsequent ones calculated from the sampling rate attribute.
range: TimeSeries__starting_time
- required: false
- multivalued: false
inlined: true
timestamps:
name: timestamps
@@ -241,8 +235,6 @@ classes:
external to the NWB file, in files storing raw data. Once timestamp data
is calculated, the contents of 'sync' are mostly for archival purposes.
range: TimeSeries__sync
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
tree_root: true
@@ -383,13 +375,24 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: NWBDataInterface
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of this collection of processed data.
+ range: text
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: NWBDataInterface
+ - range: DynamicTable
tree_root: true
Images:
name: Images
@@ -429,7 +432,5 @@ classes:
and only once, so the dataset should have the same length as the number
of images.
range: ImageReferences
- required: false
- multivalued: false
inlined: true
tree_root: true
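
ProcessingModule above (and the container classes rewritten the same way throughout this diff) now gets an explicit 'name' identifier, a required 'description', and a multivalued 'value' slot with inlined_as_list: false, i.e. a mapping keyed by member name. A hypothetical sketch of what that pattern looks like at the model level (the class body and __getitem__ are illustrative assumptions):

```python
from typing import Any, Dict

from pydantic import BaseModel


class ProcessingModule(BaseModel):
    """Assumed sketch: container members live in a name-keyed mapping
    instead of ad-hoc extra attributes."""

    name: str
    description: str                 # now a required attribute
    value: Dict[str, Any] = {}       # inlined_as_list: false -> keyed by name

    def __getitem__(self, key: str) -> Any:
        return self.value[key]


mod = ProcessingModule(
    name="behavior",
    description="Processed behavioral data.",
    value={"Position": object()},    # stand-in for an NWBDataInterface
)
assert mod["Position"] is mod.value["Position"]
```
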
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
index 32ff4f8..6a4ec81 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
@@ -38,14 +38,11 @@ classes:
reference frame.
range: SpatialSeries__data
required: true
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
description: Description defining what exactly 'straight-ahead' means.
range: text
- required: false
- multivalued: false
tree_root: true
SpatialSeries__data:
name: SpatialSeries__data
@@ -59,6 +56,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
@@ -106,12 +148,19 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: IntervalSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEpochs)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: IntervalSeries
tree_root: true
BehavioralEvents:
name: BehavioralEvents
@@ -119,12 +168,19 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralEvents)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
BehavioralTimeSeries:
name: BehavioralTimeSeries
@@ -132,36 +188,57 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(BehavioralTimeSeries)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
PupilTracking:
name: PupilTracking
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeSeries
+ name:
+ name: name
+ ifabsent: string(PupilTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: TimeSeries
tree_root: true
EyeTracking:
name: EyeTracking
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(EyeTracking)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
CompassDirection:
name: CompassDirection
@@ -172,22 +249,36 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(CompassDirection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
Position:
name: Position
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpatialSeries
+ name:
+ name: name
+ ifabsent: string(Position)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpatialSeries
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
index 71eadb4..42927b6 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
@@ -39,40 +39,6 @@ classes:
about the filter properties as possible.
range: text
required: false
- data:
- name: data
- description: Recorded voltage data.
- range: numeric
- required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - array:
- dimensions:
- - alias: num_times
- - alias: num_channels
- - alias: num_samples
- electrodes:
- name: electrodes
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: DynamicTableRegion pointer to the electrodes that this time series
- was generated from.
- range: DynamicTableRegion
- required: true
- multivalued: false
- inlined: true
channel_conversion:
name: channel_conversion
description: Channel-specific conversion factor. Multiply the data in the
@@ -90,7 +56,109 @@ classes:
range: float32
required: false
multivalued: false
+ data:
+ name: data
+ description: Recorded voltage data.
+ range: ElectricalSeries__data
+ required: true
+ inlined: true
+ electrodes:
+ name: electrodes
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: DynamicTableRegion pointer to the electrodes that this time series
+ was generated from.
+ range: DynamicTableRegion
+ required: true
+ inlined: true
tree_root: true
+ ElectricalSeries__data:
+ name: ElectricalSeries__data
+ description: Recorded voltage data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. This value
+ is fixed to 'volts'. Actual stored values are not necessarily stored in
+ these units. To access the data in these units, multiply 'data' by 'conversion',
+ followed by 'channel_conversion' (if present), and then add 'offset'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_channels
+ - alias: num_samples
SpikeEventSeries:
name: SpikeEventSeries
description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold
@@ -111,19 +179,9 @@ classes:
data:
name: data
description: Spike waveforms.
- range: numeric
+ range: SpikeEventSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_events
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_events
- - alias: num_channels
- - alias: num_samples
+ inlined: true
timestamps:
name: timestamps
description: Timestamps for samples stored in data, in seconds, relative to
@@ -137,6 +195,82 @@ classes:
required: true
multivalued: false
tree_root: true
+ SpikeEventSeries__data:
+ name: SpikeEventSeries__data
+ description: Spike waveforms.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for waveforms, which is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: true
+ equals_string: volts
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_samples
+ - array:
+ dimensions:
+ - alias: num_events
+ - alias: num_channels
+ - alias: num_samples
FeatureExtraction:
name: FeatureExtraction
description: Features, such as PC1 and PC2, that are extracted from signals stored
@@ -192,7 +326,6 @@ classes:
was generated from.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
EventDetection:
@@ -212,7 +345,6 @@ classes:
or dV/dT threshold, as well as relevant values.
range: text
required: true
- multivalued: false
source_idx:
name: source_idx
description: Indices (zero-based) into source ElectricalSeries::data array
@@ -241,7 +373,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ElectricalSeries
@@ -254,12 +385,19 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: SpikeEventSeries
+ name:
+ name: name
+ ifabsent: string(EventWaveform)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: SpikeEventSeries
tree_root: true
FilteredEphys:
name: FilteredEphys
@@ -276,12 +414,19 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(FilteredEphys)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
LFP:
name: LFP
@@ -290,12 +435,19 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: ElectricalSeries
+ name:
+ name: name
+ ifabsent: string(LFP)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: ElectricalSeries
tree_root: true
ElectrodeGroup:
name: ElectrodeGroup
@@ -323,8 +475,6 @@ classes:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
- required: false
- multivalued: false
inlined: true
device:
name: device
@@ -333,7 +483,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -356,24 +505,18 @@ classes:
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
y:
name: y
description: y coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
z:
name: z
description: z coordinate
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ClusterWaveforms:
name: ClusterWaveforms
description: DEPRECATED The mean waveform shape, including standard deviation,
@@ -395,7 +538,6 @@ classes:
description: Filtering applied to data before generating mean/sd
range: text
required: true
- multivalued: false
waveform_mean:
name: waveform_mean
description: The mean waveform for each cluster, using the same indices for
@@ -427,7 +569,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Clustering
@@ -451,7 +592,6 @@ classes:
clusters curated using Klusters, etc)
range: text
required: true
- multivalued: false
num:
name: num
description: Cluster number of each event
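
The ElectricalSeries__data 'unit' description above spells out the scaling order: multiply by 'conversion', then by 'channel_conversion' if present, then add 'offset'. A small numpy sketch of that order of operations (all numbers invented):

```python
import numpy as np

data = np.array([[100, 200], [300, 400]], dtype=np.int16)  # [num_times, num_channels]
conversion, offset = 1e-6, 0.0
channel_conversion = np.array([1.0, 2.0])                  # per-channel factors

# data * conversion, then channel_conversion, then + offset,
# broadcasting the per-channel factors across the time axis.
volts = data * conversion * channel_conversion + offset
print(volts)   # [[0.0001 0.0004], [0.0003 0.0008]]
```
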
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
index 471b87a..e556749 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
@@ -63,8 +63,6 @@ classes:
value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
timeseries:
name: timeseries
@@ -77,8 +75,6 @@ classes:
value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
- required: false
- multivalued: false
inlined: true
timeseries_index:
name: timeseries_index
@@ -91,7 +87,5 @@ classes:
value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml
index a6b27f5..35f877e 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml
@@ -81,13 +81,11 @@ classes:
other files.
range: text
required: true
- multivalued: false
session_description:
name: session_description
description: A description of the experimental session and data in the file.
range: text
required: true
- multivalued: false
session_start_time:
name: session_start_time
description: 'Date and time of the experiment/session start. The date is stored
@@ -96,7 +94,6 @@ classes:
offset. Date accuracy is up to milliseconds.'
range: isodatetime
required: true
- multivalued: false
timestamps_reference_time:
name: timestamps_reference_time
description: 'Date and time corresponding to time zero of all timestamps.
@@ -106,7 +103,6 @@ classes:
times stored in the file use this time as reference (i.e., time zero).'
range: isodatetime
required: true
- multivalued: false
acquisition:
name: acquisition
description: Data streams recorded from the system, including ephys, ophys,
@@ -185,7 +181,6 @@ classes:
can exist in the present file or can be linked to a remote library file.
range: NWBFile__stimulus
required: true
- multivalued: false
inlined: true
inlined_as_list: true
general:
@@ -207,7 +202,6 @@ classes:
should not be created unless there is data to store within them.
range: NWBFile__general
required: true
- multivalued: false
inlined: true
inlined_as_list: true
intervals:
@@ -217,18 +211,18 @@ classes:
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
range: NWBFile__intervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
units:
name: units
description: Data about sorted spike units.
range: Units
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
+ specifications:
+ name: specifications
+ description: Nested dictionary of schema specifications
+ range: dict
tree_root: true
NWBFile__stimulus:
name: NWBFile__stimulus
@@ -302,14 +296,10 @@ classes:
name: data_collection
description: Notes about data collection and analysis.
range: text
- required: false
- multivalued: false
experiment_description:
name: experiment_description
description: General description of the experiment.
range: text
- required: false
- multivalued: false
experimenter:
name: experimenter
description: Name of person(s) who performed the experiment. Can also specify
@@ -324,8 +314,6 @@ classes:
name: institution
description: Institution(s) where experiment was performed.
range: text
- required: false
- multivalued: false
keywords:
name: keywords
description: Terms to search over.
@@ -339,28 +327,20 @@ classes:
name: lab
description: Laboratory where experiment was performed.
range: text
- required: false
- multivalued: false
notes:
name: notes
description: Notes about the experiment.
range: text
- required: false
- multivalued: false
pharmacology:
name: pharmacology
description: Description of drugs used, including how and when they were administered.
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
range: text
- required: false
- multivalued: false
protocol:
name: protocol
description: Experimental protocol, if applicable. e.g., include IACUC protocol
number.
range: text
- required: false
- multivalued: false
related_publications:
name: related_publications
description: Publication information. PMID, DOI, URL, etc.
@@ -374,52 +354,31 @@ classes:
name: session_id
description: Lab-specific ID for the session.
range: text
- required: false
- multivalued: false
slices:
name: slices
description: Description of slices, including information about preparation
thickness, orientation, temperature, and bath solution.
range: text
- required: false
- multivalued: false
source_script:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
range: general__source_script
- required: false
- multivalued: false
inlined: true
stimulus:
name: stimulus
description: Notes about stimuli, such as how and where they were presented.
range: text
- required: false
- multivalued: false
surgery:
name: surgery
description: Narrative description about surgery/surgeries, including date(s)
and who performed surgery.
range: text
- required: false
- multivalued: false
virus:
name: virus
description: Information about virus(es) used in experiments, including virus
ID, source, date made, injection location, volume, etc.
range: text
- required: false
- multivalued: false
- lab_meta_data:
- name: lab_meta_data
- description: Place-holder than can be extended so that lab-specific meta-data
- can be placed in /general.
- range: LabMetaData
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
devices:
name: devices
description: Description of hardware devices used during experiment, e.g.,
@@ -434,24 +393,18 @@ classes:
description: Information about the animal or person from which the data was
measured.
range: Subject
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
range: general__extracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
range: general__intracellular_ephys
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
optogenetics:
@@ -470,6 +423,14 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
+ value:
+ name: value
+ description: Place-holder that can be extended so that lab-specific meta-data
+ can be placed in /general.
+ range: LabMetaData
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
general__source_script:
name: general__source_script
description: Script file or link to public source code used to create this NWB
@@ -502,22 +463,19 @@ classes:
range: string
required: true
equals_string: extracellular_ephys
- electrode_group:
- name: electrode_group
- description: Physical group of electrodes.
- range: ElectrodeGroup
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
range: extracellular_ephys__electrodes
- required: false
- multivalued: false
inlined: true
inlined_as_list: true
+ value:
+ name: value
+ description: Physical group of electrodes.
+ range: ElectrodeGroup
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
extracellular_ephys__electrodes:
name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
@@ -662,16 +620,6 @@ classes:
etc. If this changes between TimeSeries, filter description should be stored
as a text attribute for each TimeSeries.'
range: text
- required: false
- multivalued: false
- intracellular_electrode:
- name: intracellular_electrode
- description: An intracellular electrode.
- range: IntracellularElectrode
- required: false
- multivalued: true
- inlined: true
- inlined_as_list: false
sweep_table:
name: sweep_table
description: '[DEPRECATED] Table used to group different PatchClampSeries.
@@ -679,8 +627,6 @@ classes:
tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions
tables provide enhanced support for experiment metadata.'
range: SweepTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
intracellular_recordings:
@@ -698,8 +644,6 @@ classes:
to an electrode is also common in intracellular electrophysiology, in which
case other TimeSeries may be used.
range: IntracellularRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
simultaneous_recordings:
@@ -708,8 +652,6 @@ classes:
the IntracellularRecordingsTable table together that were recorded simultaneously
from different electrodes
range: SimultaneousRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
sequential_recordings:
@@ -719,8 +661,6 @@ classes:
together sequential recordings where a sequence of stimuli of the same
type with varying parameters has been presented in a sequence.
range: SequentialRecordingsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
repetitions:
@@ -730,8 +670,6 @@ classes:
type of stimulus, the RepetitionsTable table is typically used to group
sets of stimuli applied in sequence.
range: RepetitionsTable
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
experimental_conditions:
@@ -739,8 +677,13 @@ classes:
description: A table for grouping different intracellular recording repetitions
together that belong to the same experimental conditions.
range: ExperimentalConditionsTable
- required: false
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ value:
+ name: value
+ description: An intracellular electrode.
+ range: IntracellularElectrode
+ multivalued: true
inlined: true
inlined_as_list: false
NWBFile__intervals:
@@ -761,32 +704,25 @@ classes:
description: Divisions in time marking experimental stages or sub-divisions
of a single recording session.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
trials:
name: trials
description: Repeated experimental events that have a logical grouping.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
invalid_times:
name: invalid_times
description: Time intervals that should be removed from analysis.
range: TimeIntervals
- required: false
- multivalued: false
inlined: true
inlined_as_list: false
- time_intervals:
- name: time_intervals
+ value:
+ name: value
description: Optional additional table(s) for describing other experimental
time intervals.
range: TimeIntervals
- required: false
multivalued: true
inlined: true
inlined_as_list: false
@@ -815,59 +751,41 @@ classes:
name: age
description: Age of subject. Can be supplied instead of 'date_of_birth'.
range: Subject__age
- required: false
- multivalued: false
inlined: true
date_of_birth:
name: date_of_birth
description: Date of birth of subject. Can be supplied instead of 'age'.
range: isodatetime
- required: false
- multivalued: false
description:
name: description
description: Description of subject and where subject came from (e.g., breeder,
if animal).
range: text
- required: false
- multivalued: false
genotype:
name: genotype
description: Genetic strain. If absent, assume Wild Type (WT).
range: text
- required: false
- multivalued: false
sex:
name: sex
description: Gender of subject.
range: text
- required: false
- multivalued: false
species:
name: species
description: Species of subject.
range: text
- required: false
- multivalued: false
strain:
name: strain
description: Strain of subject.
range: text
- required: false
- multivalued: false
subject_id:
name: subject_id
description: ID of animal/person used/participating in experiment (lab convention).
range: text
- required: false
- multivalued: false
weight:
name: weight
description: Weight at time of experiment, at time of surgery and at other
important times.
range: text
- required: false
- multivalued: false
tree_root: true
Subject__age:
name: Subject__age
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
index a8662e7..30fcb0c 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
@@ -41,15 +41,12 @@ classes:
description: Recorded voltage or current.
range: PatchClampSeries__data
required: true
- multivalued: false
inlined: true
gain:
name: gain
description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt
(c-clamp).
range: float32
- required: false
- multivalued: false
electrode:
name: electrode
annotations:
@@ -57,7 +54,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: IntracellularElectrode
@@ -74,6 +70,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -99,31 +140,24 @@ classes:
identifier: true
range: string
required: true
+ bias_current:
+ name: bias_current
+ description: Bias current, in amps.
+ range: float32
+ bridge_balance:
+ name: bridge_balance
+ description: Bridge balance, in ohms.
+ range: float32
+ capacitance_compensation:
+ name: capacitance_compensation
+ description: Capacitance compensation, in farads.
+ range: float32
data:
name: data
description: Recorded voltage.
range: CurrentClampSeries__data
required: true
- multivalued: false
inlined: true
- bias_current:
- name: bias_current
- description: Bias current, in amps.
- range: float32
- required: false
- multivalued: false
- bridge_balance:
- name: bridge_balance
- description: Bridge balance, in ohms.
- range: float32
- required: false
- multivalued: false
- capacitance_compensation:
- name: capacitance_compensation
- description: Capacitance compensation, in farads.
- range: float32
- required: false
- multivalued: false
tree_root: true
CurrentClampSeries__data:
name: CurrentClampSeries__data
@@ -136,6 +170,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -148,8 +227,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
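Retyping the scalar `value` slot from `AnyType` to a shaped numeric array is the substantive change here. A sketch of how such a slot plausibly surfaces in the regenerated pydantic models, using numpydantic-style annotations (the class and field layout is illustrative, not verbatim nwb-models output):

```python
from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class CurrentClampSeriesData(BaseModel):
    # formerly an unconstrained AnyType; now a 1-D numeric array
    # indexed by the num_times dimension
    value: Optional[NDArray[Shape["* num_times"], float]] = None


CurrentClampSeriesData(value=np.linspace(-0.07, 0.04, 1000))
```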
IZeroClampSeries:
name: IZeroClampSeries
description: Voltage data from an intracellular recording when all current and
@@ -176,19 +257,16 @@ classes:
description: Bias current, in amps, fixed to 0.0.
range: float32
required: true
- multivalued: false
bridge_balance:
name: bridge_balance
description: Bridge balance, in ohms, fixed to 0.0.
range: float32
required: true
- multivalued: false
capacitance_compensation:
name: capacitance_compensation
description: Capacitance compensation, in farads, fixed to 0.0.
range: float32
required: true
- multivalued: false
tree_root: true
CurrentClampStimulusSeries:
name: CurrentClampStimulusSeries
@@ -205,7 +283,6 @@ classes:
description: Stimulus current applied.
range: CurrentClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
CurrentClampStimulusSeries__data:
@@ -219,6 +296,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -231,8 +353,10 @@ classes:
equals_string: amperes
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries:
name: VoltageClampSeries
description: Current data from an intracellular voltage-clamp recording. A corresponding
@@ -245,88 +369,48 @@ classes:
identifier: true
range: string
required: true
- data:
- name: data
- description: Recorded current.
- range: VoltageClampSeries__data
- required: true
- multivalued: false
- inlined: true
capacitance_fast:
name: capacitance_fast
description: Fast capacitance, in farads.
range: VoltageClampSeries__capacitance_fast
- required: false
- multivalued: false
inlined: true
capacitance_slow:
name: capacitance_slow
description: Slow capacitance, in farads.
range: VoltageClampSeries__capacitance_slow
- required: false
- multivalued: false
+ inlined: true
+ data:
+ name: data
+ description: Recorded current.
+ range: VoltageClampSeries__data
+ required: true
inlined: true
resistance_comp_bandwidth:
name: resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
range: VoltageClampSeries__resistance_comp_bandwidth
- required: false
- multivalued: false
inlined: true
resistance_comp_correction:
name: resistance_comp_correction
description: Resistance compensation correction, in percent.
range: VoltageClampSeries__resistance_comp_correction
- required: false
- multivalued: false
inlined: true
resistance_comp_prediction:
name: resistance_comp_prediction
description: Resistance compensation prediction, in percent.
range: VoltageClampSeries__resistance_comp_prediction
- required: false
- multivalued: false
inlined: true
whole_cell_capacitance_comp:
name: whole_cell_capacitance_comp
description: Whole cell capacitance compensation, in farads.
range: VoltageClampSeries__whole_cell_capacitance_comp
- required: false
- multivalued: false
inlined: true
whole_cell_series_resistance_comp:
name: whole_cell_series_resistance_comp
description: Whole cell series resistance compensation, in ohms.
range: VoltageClampSeries__whole_cell_series_resistance_comp
- required: false
- multivalued: false
inlined: true
tree_root: true
- VoltageClampSeries__data:
- name: VoltageClampSeries__data
- description: Recorded current.
- attributes:
- name:
- name: name
- ifabsent: string(data)
- identifier: true
- range: string
- required: true
- equals_string: data
- unit:
- name: unit
- description: Base unit of measurement for working with the data. which is
- fixed to 'amperes'. Actual stored values are not necessarily stored in these
- units. To access the data in these units, multiply 'data' by 'conversion'
- and add 'offset'.
- ifabsent: string(amperes)
- range: text
- required: true
- equals_string: amperes
- value:
- name: value
- range: AnyType
- required: true
VoltageClampSeries__capacitance_fast:
name: VoltageClampSeries__capacitance_fast
description: Fast capacitance, in farads.
@@ -371,6 +455,78 @@ classes:
name: value
range: float32
required: true
+ VoltageClampSeries__data:
+ name: VoltageClampSeries__data
+ description: Recorded current.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+        description: Base unit of measurement for working with the data, which is
+ fixed to 'amperes'. Actual stored values are not necessarily stored in these
+ units. To access the data in these units, multiply 'data' by 'conversion'
+ and add 'offset'.
+ ifabsent: string(amperes)
+ range: text
+ required: true
+ equals_string: amperes
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
VoltageClampSeries__resistance_comp_bandwidth:
name: VoltageClampSeries__resistance_comp_bandwidth
description: Resistance compensation bandwidth, in hertz.
@@ -501,7 +657,6 @@ classes:
description: Stimulus voltage applied.
range: VoltageClampStimulusSeries__data
required: true
- multivalued: false
inlined: true
tree_root: true
VoltageClampStimulusSeries__data:
@@ -515,6 +670,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data, which is
@@ -527,8 +727,10 @@ classes:
equals_string: volts
value:
name: value
- range: AnyType
- required: true
+ array:
+ dimensions:
+ - alias: num_times
+ range: numeric
IntracellularElectrode:
name: IntracellularElectrode
description: An intracellular electrode and its metadata.
@@ -543,52 +745,37 @@ classes:
name: cell_id
description: unique ID of the cell
range: text
- required: false
- multivalued: false
description:
name: description
description: Description of electrode (e.g., whole-cell, sharp, etc.).
range: text
required: true
- multivalued: false
filtering:
name: filtering
description: Electrode specific filtering.
range: text
- required: false
- multivalued: false
initial_access_resistance:
name: initial_access_resistance
description: Initial access resistance.
range: text
- required: false
- multivalued: false
location:
name: location
description: Location of the electrode. Specify the area, layer, comments
on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
standard atlas names for anatomical regions when possible.
range: text
- required: false
- multivalued: false
resistance:
name: resistance
description: Electrode resistance, in ohms.
range: text
- required: false
- multivalued: false
seal:
name: seal
description: Information about seal used for recording.
range: text
- required: false
- multivalued: false
slice:
name: slice
description: Information about slice used for recording.
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
@@ -596,7 +783,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -615,15 +801,6 @@ classes:
identifier: true
range: string
required: true
- sweep_number:
- name: sweep_number
- description: Sweep number of the PatchClampSeries in that row.
- array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: uint32
- required: true
- multivalued: false
series:
name: series
description: The PatchClampSeries with the sweep number in that row.
@@ -646,8 +823,16 @@ classes:
description: Index for series.
range: VectorIndex
required: true
- multivalued: false
inlined: true
+ sweep_number:
+ name: sweep_number
+ description: Sweep number of the PatchClampSeries in that row.
+ array:
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: uint32
+ required: true
+ multivalued: false
tree_root: true
IntracellularElectrodesTable:
name: IntracellularElectrodesTable
@@ -707,7 +892,6 @@ classes:
recording (rows).
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
stimulus_template:
name: stimulus_template
@@ -721,8 +905,6 @@ classes:
description: Column storing the reference to the stimulus template for the
recording (rows).
range: TimeSeriesReferenceVectorData
- required: false
- multivalued: false
inlined: true
tree_root: true
IntracellularResponsesTable:
@@ -755,7 +937,6 @@ classes:
recording (rows)
range: TimeSeriesReferenceVectorData
required: true
- multivalued: false
inlined: true
tree_root: true
IntracellularRecordingsTable:
@@ -797,15 +978,6 @@ classes:
description: Table for storing intracellular electrode related metadata.
range: IntracellularElectrodesTable
required: true
- multivalued: false
- inlined: true
- inlined_as_list: false
- stimuli:
- name: stimuli
- description: Table for storing intracellular stimulus related metadata.
- range: IntracellularStimuliTable
- required: true
- multivalued: false
inlined: true
inlined_as_list: false
responses:
@@ -813,7 +985,13 @@ classes:
description: Table for storing intracellular response related metadata.
range: IntracellularResponsesTable
required: true
- multivalued: false
+ inlined: true
+ inlined_as_list: false
+ stimuli:
+ name: stimuli
+ description: Table for storing intracellular stimulus related metadata.
+ range: IntracellularStimuliTable
+ required: true
inlined: true
inlined_as_list: false
tree_root: true
@@ -837,7 +1015,6 @@ classes:
table.
range: SimultaneousRecordingsTable__recordings
required: true
- multivalued: false
inlined: true
recordings_index:
name: recordings_index
@@ -851,7 +1028,6 @@ classes:
description: Index dataset for the recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
SimultaneousRecordingsTable__recordings:
@@ -896,7 +1072,6 @@ classes:
table.
range: SequentialRecordingsTable__simultaneous_recordings
required: true
- multivalued: false
inlined: true
simultaneous_recordings_index:
name: simultaneous_recordings_index
@@ -910,7 +1085,6 @@ classes:
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
stimulus_type:
name: stimulus_type
@@ -964,7 +1138,6 @@ classes:
table.
range: RepetitionsTable__sequential_recordings
required: true
- multivalued: false
inlined: true
sequential_recordings_index:
name: sequential_recordings_index
@@ -978,7 +1151,6 @@ classes:
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
RepetitionsTable__sequential_recordings:
@@ -1020,7 +1192,6 @@ classes:
description: A reference to one or more rows in the RepetitionsTable table.
range: ExperimentalConditionsTable__repetitions
required: true
- multivalued: false
inlined: true
repetitions_index:
name: repetitions_index
@@ -1034,7 +1205,6 @@ classes:
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
- multivalued: false
inlined: true
tree_root: true
ExperimentalConditionsTable__repetitions:
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml
index 603c351..6b17e13 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml
@@ -91,21 +91,9 @@ classes:
name: data
description: Binary data representing images across frames. If data are stored
in an external file, this should be an empty 3D array.
- range: numeric
+ range: ImageSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - array:
- dimensions:
- - alias: frame
- - alias: x
- - alias: y
- - alias: z
+ inlined: true
dimension:
name: dimension
description: Number of pixels on x, y, (and z) axes.
@@ -123,8 +111,6 @@ classes:
used if the image is stored in another NWB file and that file is linked
to this file.
range: ImageSeries__external_file
- required: false
- multivalued: false
inlined: true
format:
name: format
@@ -132,22 +118,98 @@ classes:
contains the path information to the image files. If this is 'raw', then
the raw (single-channel) binary data is stored in the 'data' dataset. If
this attribute is not present, then the default format='raw' case is assumed.
+ ifabsent: string(raw)
range: text
- required: false
- multivalued: false
device:
name: device
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Device
- range: string
tree_root: true
+ ImageSeries__data:
+ name: ImageSeries__data
+ description: Binary data representing images across frames. If data are stored
+ in an external file, this should be an empty 3D array.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - array:
+ dimensions:
+ - alias: frame
+ - alias: x
+ - alias: y
+ - alias: z
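The `any_of` above keeps both legal shapes for image stacks: `[frame, x, y]` for planar data and `[frame, x, y, z]` for volumetric data. In numpydantic-style terms this is a plain union of array types (alias name and dtype are assumptions for illustration):

```python
from typing import Union

from numpydantic import NDArray, Shape

ImageSeriesValue = Union[
    NDArray[Shape["* frame, * x, * y"], float],       # planar [frame, x, y]
    NDArray[Shape["* frame, * x, * y, * z"], float],  # volumetric [frame, x, y, z]
]
```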
ImageSeries__external_file:
name: ImageSeries__external_file
description: Paths to one or more external file(s). The field is only present
@@ -206,7 +268,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -226,12 +287,16 @@ classes:
identifier: true
range: string
required: true
+ data:
+ name: data
+ description: Images presented to subject, either grayscale or RGB
+ range: OpticalSeries__data
+ required: true
+ inlined: true
distance:
name: distance
description: Distance from camera/monitor to target/eye.
range: float32
- required: false
- multivalued: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -247,12 +312,78 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
- data:
- name: data
- description: Images presented to subject, either grayscale or RGB
- range: numeric
+ orientation:
+ name: orientation
+ description: Description of image relative to some reference frame (e.g.,
+ which way is up). Must also specify frame of reference.
+ range: text
+ tree_root: true
+ OpticalSeries__data:
+ name: OpticalSeries__data
+ description: Images presented to subject, either grayscale or RGB
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
required: true
- multivalued: false
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -266,14 +397,6 @@ classes:
- alias: y
- alias: r_g_b
exact_cardinality: 3
- orientation:
- name: orientation
- description: Description of image relative to some reference frame (e.g.,
- which way is up). Must also specify frame of reference.
- range: text
- required: false
- multivalued: false
- tree_root: true
IndexSeries:
name: IndexSeries
description: Stores indices to image frames stored in an ImageSeries. The purpose
@@ -294,20 +417,15 @@ classes:
name: data
description: Index of the image (using zero-indexing) in the linked Images
object.
- array:
- dimensions:
- - alias: num_times
- range: uint32
+ range: IndexSeries__data
required: true
- multivalued: false
+ inlined: true
indexed_timeseries:
name: indexed_timeseries
annotations:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
@@ -318,10 +436,62 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: Images
- range: string
tree_root: true
+ IndexSeries__data:
+ name: IndexSeries__data
+ description: Index of the image (using zero-indexing) in the linked Images object.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: This field is unused by IndexSeries.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: This field is unused by IndexSeries.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: This field is unused by IndexSeries.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: This field is unused by IndexSeries and has the value N/A.
+ ifabsent: string(N/A)
+ range: text
+ required: true
+ equals_string: N/A
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_times
+ range: uint32
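The relocated array now lives on the `value` slot of `IndexSeries__data`: zero-based uint32 indices into the frames of a linked Images object. A toy sketch of what that indexing means (stand-in data, illustrative names):

```python
images = ["imgA.png", "imgB.png", "imgC.png"]  # stand-in for a linked Images object
index_data = [0, 0, 2, 1]                      # IndexSeries__data value (zero-based)
shown = [images[i] for i in index_data]        # frame presented at each timestamp
```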
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml
index e42c742..e36f824 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
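The two type tweaks above are aimed at the Python generator: `repr` overrides the annotation emitted for a LinkML type, so slots ranging over `numeric` can render as a float/int union and the new `dict` type as a plain dict. A hedged sketch of the intended annotations (illustrative, not verbatim generator output):

```python
from typing import TypeAlias

Numeric: TypeAlias = float | int  # what `repr: float | int` asks the generator to emit
Extras: TypeAlias = dict          # a slot with range `dict`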
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
index b30070d..917e860 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
@@ -38,7 +38,6 @@ classes:
description: Values of each feature at each time.
range: AbstractFeatureSeries__data
required: true
- multivalued: false
inlined: true
feature_units:
name: feature_units
@@ -70,6 +69,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Since there can be different units for different features, store
@@ -105,13 +149,79 @@ classes:
data:
name: data
description: Annotations made during an experiment.
+ range: AnnotationSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ AnnotationSeries__data:
+ name: AnnotationSeries__data
+ description: Annotations made during an experiment.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: text
- required: true
- multivalued: false
- tree_root: true
IntervalSeries:
name: IntervalSeries
description: Stores intervals of data. The timestamps field stores the beginning
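`AnnotationSeries__data` above pins `resolution` (required, `equals_number: -1`) and `unit` (`equals_string: n/a`) to fixed values. A sketch of how such constant-valued slots plausibly render in a pydantic model (class name and validation style are assumptions):

```python
from typing import Literal

from pydantic import BaseModel, Field


class AnnotationSeriesData(BaseModel):
    # equals_number: -1 with ifabsent float(-1.0): a constant default
    resolution: float = Field(-1.0)
    # equals_string: n/a with ifabsent string(n/a): a literal-typed constant
    unit: Literal["n/a"] = Field("n/a")
```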
@@ -131,13 +241,79 @@ classes:
data:
name: data
description: Use values >0 if interval started, <0 if interval ended.
+ range: IntervalSeries__data
+ required: true
+ inlined: true
+ tree_root: true
+ IntervalSeries__data:
+ name: IntervalSeries__data
+ description: Use values >0 if interval started, <0 if interval ended.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data. Annotations
+ have no units, so the value is fixed to -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: true
+ equals_number: -1
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Annotations
+ have no units, so the value is fixed to 'n/a'.
+ ifabsent: string(n/a)
+ range: text
+ required: true
+ equals_string: n/a
+ value:
+ name: value
array:
dimensions:
- alias: num_times
range: int8
- required: true
- multivalued: false
- tree_root: true
DecompositionSeries:
name: DecompositionSeries
description: Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@@ -153,14 +329,12 @@ classes:
description: Data decomposed into frequency bands.
range: DecompositionSeries__data
required: true
- multivalued: false
inlined: true
metric:
name: metric
description: The metric used, e.g. phase, amplitude, power.
range: text
required: true
- multivalued: false
source_channels:
name: source_channels
annotations:
@@ -173,8 +347,6 @@ classes:
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
bands:
name: bands
@@ -182,7 +354,6 @@ classes:
from. There should be one row in this table for each band.
range: DecompositionSeries__bands
required: true
- multivalued: false
inlined: true
inlined_as_list: true
source_timeseries:
@@ -191,8 +362,6 @@ classes:
source_type:
tag: source_type
value: link
- required: false
- multivalued: false
inlined: true
any_of:
- range: TimeSeries
@@ -209,6 +378,51 @@ classes:
range: string
required: true
equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+        in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
@@ -289,63 +503,13 @@ classes:
identifier: true
range: string
required: true
- spike_times_index:
- name: spike_times_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the spike_times dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- spike_times:
- name: spike_times
- description: Spike times for each unit in seconds.
- range: Units__spike_times
- required: false
- multivalued: false
- inlined: true
- obs_intervals_index:
- name: obs_intervals_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into the obs_intervals dataset.
- range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- obs_intervals:
- name: obs_intervals
- description: Observation intervals for each unit.
+ electrode_group:
+ name: electrode_group
+ description: Electrode group that each spike unit came from.
array:
- dimensions:
- - alias: num_intervals
- - alias: start_end
- exact_cardinality: 2
- range: float64
- required: false
- multivalued: false
- electrodes_index:
- name: electrodes_index
- annotations:
- named:
- tag: named
- value: true
- source_type:
- tag: source_type
- value: neurodata_type_inc
- description: Index into electrodes.
- range: VectorIndex
+ minimum_number_dimensions: 1
+ maximum_number_dimensions: false
+ range: ElectrodeGroup
required: false
multivalued: false
inlined: true
@@ -360,51 +524,69 @@ classes:
value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
- required: false
- multivalued: false
inlined: true
- electrode_group:
- name: electrode_group
- description: Electrode group that each spike unit came from.
+ electrodes_index:
+ name: electrodes_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into electrodes.
+ range: VectorIndex
+ inlined: true
+ obs_intervals:
+ name: obs_intervals
+ description: Observation intervals for each unit.
array:
- minimum_number_dimensions: 1
- maximum_number_dimensions: false
- range: ElectrodeGroup
+ dimensions:
+ - alias: num_intervals
+ - alias: start_end
+ exact_cardinality: 2
+ range: float64
required: false
multivalued: false
+ obs_intervals_index:
+ name: obs_intervals_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the obs_intervals dataset.
+ range: VectorIndex
+ inlined: true
+ spike_times:
+ name: spike_times
+ description: Spike times for each unit in seconds.
+ range: Units__spike_times
+ inlined: true
+ spike_times_index:
+ name: spike_times_index
+ annotations:
+ named:
+ tag: named
+ value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
+ description: Index into the spike_times dataset.
+ range: VectorIndex
inlined: true
waveform_mean:
name: waveform_mean
description: Spike waveform mean for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_mean
+ inlined: true
waveform_sd:
name: waveform_sd
description: Spike waveform standard deviation for each spike unit.
- range: float32
- required: false
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - array:
- dimensions:
- - alias: num_units
- - alias: num_samples
- - alias: num_electrodes
+ range: Units__waveform_sd
+ inlined: true
waveforms:
name: waveforms
description: Individual waveforms for each spike on each electrode. This is
@@ -430,13 +612,8 @@ classes:
order of the waveforms within a given unit x spike event should be in the
same order as the electrodes referenced in the 'electrodes' column of this
table. The number of samples for each waveform must be the same.
- array:
- dimensions:
- - alias: num_waveforms
- - alias: num_samples
- range: numeric
- required: false
- multivalued: false
+ range: Units__waveforms
+ inlined: true
waveforms_index:
name: waveforms_index
annotations:
@@ -449,8 +626,6 @@ classes:
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
waveforms_index_index:
name: waveforms_index_index
@@ -464,8 +639,6 @@ classes:
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
tree_root: true
Units__spike_times:
@@ -489,3 +662,97 @@ classes:
for the spike time to be between samples.
range: float64
required: false
+ Units__waveform_mean:
+ name: Units__waveform_mean
+ description: Spike waveform mean for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_mean)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_mean
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveform_sd:
+ name: Units__waveform_sd
+ description: Spike waveform standard deviation for each spike unit.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveform_sd)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveform_sd
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
+ Units__waveforms:
+ name: Units__waveforms
+ description: Individual waveforms for each spike on each electrode. This is a
+ doubly indexed column. The 'waveforms_index' column indexes which waveforms
+ in this column belong to the same spike event for a given unit, where each waveform
+ was recorded from a different electrode. The 'waveforms_index_index' column
+ indexes the 'waveforms_index' column to indicate which spike events belong to
+ a given unit. For example, if the 'waveforms_index_index' column has values
+ [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond
+ to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index'
+ column correspond to the 3 spike events of the second unit, and the next 1 element
+ of the 'waveforms_index' column corresponds to the 1 spike event of the third
+ unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then
+ the first 3 elements of the 'waveforms' column contain the 3 spike waveforms
+ that were recorded from 3 different electrodes for the first spike time of the
+ first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays
+ for a graphical representation of this example. When there is only one electrode
+ for each unit (i.e., each spike time is associated with a single waveform),
+ then the 'waveforms_index' column will have values 1, 2, ..., N, where N is
+ the number of spike events. The number of electrodes for each spike event should
+ be the same within a given unit. The 'electrodes' column should be used to indicate
+ which electrodes are associated with each unit, and the order of the waveforms
+ within a given unit x spike event should be in the same order as the electrodes
+ referenced in the 'electrodes' column of this table. The number of samples for
+ each waveform must be the same.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ ifabsent: string(waveforms)
+ identifier: true
+ range: string
+ required: true
+ equals_string: waveforms
+ sampling_rate:
+ name: sampling_rate
+ description: Sampling rate, in hertz.
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement. This value is fixed to 'volts'.
+ ifabsent: string(volts)
+ range: text
+ required: false
+ equals_string: volts
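The `Units__waveforms` description above walks through the doubly-ragged indexing in prose; here is the same walk in code, using the example offsets from the description (VectorIndex columns store cumulative end offsets):

```python
waveforms_index_index = [2, 5, 6]        # per-unit end offsets into waveforms_index
waveforms_index = [3, 6, 8, 10, 12, 13]  # per-spike-event end offsets into waveforms


def waveform_rows(unit: int) -> list[range]:
    """Row ranges in 'waveforms' for each spike event of `unit`."""
    ev_lo = waveforms_index_index[unit - 1] if unit else 0
    ev_hi = waveforms_index_index[unit]
    rows = []
    for ev in range(ev_lo, ev_hi):
        w_lo = waveforms_index[ev - 1] if ev else 0
        rows.append(range(w_lo, waveforms_index[ev]))
    return rows


# First unit: 2 spike events; its first spike spans waveform rows 0..2, i.e.
# the 3 waveforms recorded on 3 different electrodes for that spike time.
assert waveform_rows(0) == [range(0, 3), range(3, 6)]
```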
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
index 9cc7b0d..2e71557 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
@@ -29,9 +29,89 @@ classes:
description: Applied power for optogenetic stimulus, in watts. Shape can be
1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries
that defines what the second dimension represents.
- range: numeric
+ range: OptogeneticSeries__data
required: true
- multivalued: false
+ inlined: true
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ inlined: true
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
+ tree_root: true
+ OptogeneticSeries__data:
+ name: OptogeneticSeries__data
+ description: Applied power for optogenetic stimulus, in watts. Shape can be 1D
+ or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that
+ defines what the second dimension represents.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Unit of measurement for data, which is fixed to 'watts'.
+ ifabsent: string(watts)
+ range: text
+ required: true
+ equals_string: watts
+ value:
+ name: value
+ range: numeric
any_of:
- array:
dimensions:
@@ -40,19 +120,6 @@ classes:
dimensions:
- alias: num_times
- alias: num_rois
- site:
- name: site
- annotations:
- source_type:
- tag: source_type
- value: link
- required: true
- multivalued: false
- inlined: true
- any_of:
- - range: OptogeneticStimulusSite
- - range: string
- tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
description: A site of optogenetic stimulation.
@@ -68,13 +135,11 @@ classes:
description: Description of stimulation site.
range: text
required: true
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
location:
name: location
description: Location of the stimulation site. Specify the area, layer, comments
@@ -82,7 +147,6 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
device:
name: device
annotations:
@@ -90,7 +154,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
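
Note: the hunk above replaces the bare numeric `data` dataset of OptogeneticSeries with a dedicated OptogeneticSeries__data class, so the conversion/offset/resolution/unit attributes travel with the array (now held in `value`). As a quick sanity check on the worked example in the `conversion` description, a minimal Python sketch (illustrative values only, not the generated nwb-models class):

    # Applying conversion/offset as described above: raw int16 counts from a
    # 5 V-range, 8000x-gain acquisition system, scaled to the specified unit.
    raw = -32768                     # most negative int16 count
    conversion = 2.5 / 32768 / 8000  # = 9.5367e-9, as stated in the description
    offset = 0.0                     # no re-centering needed in this example
    volts = raw * conversion + offset
    print(f"{volts:.6e}")            # -3.125000e-04, i.e. -2.5 V / 8000x gain
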
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
index b5d3676..293371a 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
@@ -26,11 +26,31 @@ classes:
identifier: true
range: string
required: true
+ binning:
+ name: binning
+ description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
+ range: uint8
+ required: false
+ exposure_time:
+ name: exposure_time
+ description: Exposure time of the sample; often the inverse of the frequency.
+ range: float32
+ required: false
+ intensity:
+ name: intensity
+ description: Intensity of the excitation in mW/mm^2, if known.
+ range: float32
+ required: false
pmt_gain:
name: pmt_gain
description: Photomultiplier gain.
range: float32
required: false
+ power:
+ name: power
+ description: Power of the excitation in mW, if known.
+ range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
@@ -38,26 +58,6 @@ classes:
be stored w/ the actual data.
range: float32
required: false
- exposure_time:
- name: exposure_time
- description: Exposure time of the sample; often the inverse of the frequency.
- range: float32
- required: false
- binning:
- name: binning
- description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
- range: uint8
- required: false
- power:
- name: power
- description: Power of the excitation in mW, if known.
- range: float32
- required: false
- intensity:
- name: intensity
- description: Intensity of the excitation in mW/mm^2, if known.
- range: float32
- required: false
imaging_plane:
name: imaging_plane
annotations:
@@ -65,7 +65,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -115,7 +114,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -135,17 +133,9 @@ classes:
data:
name: data
description: Signals from ROIs.
- range: numeric
+ range: RoiResponseSeries__data
required: true
- multivalued: false
- any_of:
- - array:
- dimensions:
- - alias: num_times
- - array:
- dimensions:
- - alias: num_times
- - alias: num_rois
+ inlined: true
rois:
name: rois
annotations:
@@ -159,9 +149,82 @@ classes:
on the ROIs stored in this timeseries.
range: DynamicTableRegion
required: true
- multivalued: false
inlined: true
tree_root: true
+ RoiResponseSeries__data:
+ name: RoiResponseSeries__data
+ description: Signals from ROIs.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ identifier: true
+ range: string
+ required: true
+ equals_string: data
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as signed 16-bit
+ integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V
+ to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
+ multiplier to get from raw data acquisition values to recorded volts is
+ 2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ offset:
+ name: offset
+ description: Scalar to add to the data after scaling by 'conversion' to finalize
+ its coercion to the specified 'unit'. Two common examples of this include
+ (a) data stored in an unsigned type that requires a shift after scaling
+ to re-center the data, and (b) specialized recording devices that naturally
+ cause a scalar offset with respect to the true units.
+ range: float32
+ required: false
+ resolution:
+ name: resolution
+ description: Smallest meaningful difference between values in data, stored
+ in the specified unit, e.g., the change in value of the least significant
+ bit, or a larger number if signal noise is known to be present. If unknown,
+ use -1.0.
+ ifabsent: float(-1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. Actual stored
+ values are not necessarily stored in these units. To access the data in
+ these units, multiply 'data' by 'conversion' and add 'offset'.
+ range: text
+ required: true
+ continuity:
+ name: continuity
+ description: Optionally describe the continuity of the data. Can be "continuous",
+ "instantaneous", or "step". For example, a voltage trace would be "continuous",
+ because samples are recorded from a continuous process. An array of lick
+ times would be "instantaneous", because the data represents distinct moments
+ in time. Times of image presentations would be "step" because the picture
+ remains the same until the next timepoint. This field is optional, but is
+ useful in providing information about the underlying data. It may inform
+ the way this data is interpreted, the way it is visualized, and what analysis
+ methods are applicable.
+ range: text
+ required: false
+ value:
+ name: value
+ range: numeric
+ any_of:
+ - array:
+ dimensions:
+ - alias: num_times
+ - array:
+ dimensions:
+ - alias: num_times
+ - alias: num_rois
DfOverF:
name: DfOverF
description: dF/F information about a region of interest (ROI). Storage hierarchy
@@ -169,12 +232,19 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(DfOverF)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
Fluorescence:
name: Fluorescence
@@ -183,12 +253,19 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: RoiResponseSeries
+ name:
+ name: name
+ ifabsent: string(Fluorescence)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: RoiResponseSeries
tree_root: true
ImageSegmentation:
name: ImageSegmentation
@@ -201,12 +278,19 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: PlaneSegmentation
+ name:
+ name: name
+ ifabsent: string(ImageSegmentation)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: PlaneSegmentation
tree_root: true
PlaneSegmentation:
name: PlaneSegmentation
@@ -237,6 +321,13 @@ classes:
- alias: num_x
- alias: num_y
- alias: num_z
+ pixel_mask:
+ name: pixel_mask
+ description: 'Pixel masks for each ROI: a list of indices and weights for
+ the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ by the PlaneSegmentation'
+ range: PlaneSegmentation__pixel_mask
+ inlined: true
pixel_mask_index:
name: pixel_mask_index
annotations:
@@ -248,17 +339,13 @@ classes:
value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
- required: false
- multivalued: false
inlined: true
- pixel_mask:
- name: pixel_mask
- description: 'Pixel masks for each ROI: a list of indices and weights for
- the ROI. Pixel masks are concatenated and parsing of this dataset is maintained
+ voxel_mask:
+ name: voxel_mask
+ description: 'Voxel masks for each ROI: a list of indices and weights for
+ the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
by the PlaneSegmentation'
- range: PlaneSegmentation__pixel_mask
- required: false
- multivalued: false
+ range: PlaneSegmentation__voxel_mask
inlined: true
voxel_mask_index:
name: voxel_mask_index
@@ -271,17 +358,6 @@ classes:
value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
- required: false
- multivalued: false
- inlined: true
- voxel_mask:
- name: voxel_mask
- description: 'Voxel masks for each ROI: a list of indices and weights for
- the ROI. Voxel masks are concatenated and parsing of this dataset is maintained
- by the PlaneSegmentation'
- range: PlaneSegmentation__voxel_mask
- required: false
- multivalued: false
inlined: true
reference_images:
name: reference_images
@@ -298,7 +374,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImagingPlane
@@ -324,24 +399,18 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Pixel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the pixel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
PlaneSegmentation__voxel_mask:
name: PlaneSegmentation__voxel_mask
description: 'Voxel masks for each ROI: a list of indices and weights for the
@@ -362,32 +431,24 @@ classes:
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
y:
name: y
description: Voxel y-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
z:
name: z
description: Voxel z-coordinate.
array:
exact_number_dimensions: 1
range: uint32
- required: false
- multivalued: false
weight:
name: weight
description: Weight of the voxel.
array:
exact_number_dimensions: 1
range: float32
- required: false
- multivalued: false
ImagingPlane:
name: ImagingPlane
description: An imaging plane and its metadata.
@@ -402,27 +463,21 @@ classes:
name: description
description: Description of the imaging plane.
range: text
- required: false
- multivalued: false
excitation_lambda:
name: excitation_lambda
description: Excitation wavelength, in nm.
range: float32
required: true
- multivalued: false
imaging_rate:
name: imaging_rate
description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
is present, the rate should be stored there instead.
range: float32
- required: false
- multivalued: false
indicator:
name: indicator
description: Calcium indicator.
range: text
required: true
- multivalued: false
location:
name: location
description: Location of the imaging plane. Specify the area, layer, comments
@@ -430,15 +485,12 @@ classes:
standard atlas names for anatomical regions when possible.
range: text
required: true
- multivalued: false
manifold:
name: manifold
description: DEPRECATED Physical position of each pixel. 'xyz' represents
the position of the pixel relative to the defined coordinate space. Deprecated
in favor of origin_coords and grid_spacing.
range: ImagingPlane__manifold
- required: false
- multivalued: false
inlined: true
origin_coords:
name: origin_coords
@@ -446,8 +498,6 @@ classes:
0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
what the physical location is relative to (e.g., bregma).
range: ImagingPlane__origin_coords
- required: false
- multivalued: false
inlined: true
grid_spacing:
name: grid_spacing
@@ -455,8 +505,6 @@ classes:
in the specified unit. Assumes imaging plane is a regular grid. See also
reference_frame to interpret the grid.
range: ImagingPlane__grid_spacing
- required: false
- multivalued: false
inlined: true
reference_frame:
name: reference_frame
@@ -478,8 +526,6 @@ classes:
axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
axis (larger index = more ventral)."
range: text
- required: false
- multivalued: false
optical_channel:
name: optical_channel
description: An optical channel used to record from an imaging plane.
@@ -495,7 +541,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: Device
@@ -628,13 +673,11 @@ classes:
description: Description or other notes about the channel.
range: text
required: true
- multivalued: false
emission_lambda:
name: emission_lambda
description: Emission wavelength for channel, in nm.
range: float32
required: true
- multivalued: false
tree_root: true
MotionCorrection:
name: MotionCorrection
@@ -643,12 +686,19 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: CorrectedImageStack
+ name:
+ name: name
+ ifabsent: string(MotionCorrection)
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: CorrectedImageStack
tree_root: true
CorrectedImageStack:
name: CorrectedImageStack
@@ -665,7 +715,6 @@ classes:
description: Image stack with frames shifted to the common coordinates.
range: ImageSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
xy_translation:
@@ -674,7 +723,6 @@ classes:
coordinates, for example, to align each frame to a reference image.
range: TimeSeries
required: true
- multivalued: false
inlined: true
inlined_as_list: false
original:
@@ -684,7 +732,6 @@ classes:
tag: source_type
value: link
required: true
- multivalued: false
inlined: true
any_of:
- range: ImageSeries
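
Note: throughout this file the generator now emits explicit `name` and `value` attributes in place of the anonymous `- name: value` list entry, so container groups such as DfOverF, Fluorescence, and MotionCorrection get a named identifier with an `ifabsent` default. A rough sketch of the model shape this implies (field names taken from the schema above; the real generated classes in nwb-models add base classes and validators):

    from typing import Optional
    from pydantic import BaseModel, Field

    class RoiResponseSeries(BaseModel):  # stand-in for the real class
        name: str

    class Fluorescence(BaseModel):
        # ifabsent: string(Fluorescence) -> a default name
        name: str = Field(default="Fluorescence")
        # multivalued + inlined_as_list: false -> a dict keyed by child name
        value: Optional[dict[str, RoiResponseSeries]] = None
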
diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
index 8cc1810..a376d92 100644
--- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
@@ -37,30 +37,24 @@ classes:
description: Phase response to stimulus on the first measured axis.
range: ImagingRetinotopy__axis_1_phase_map
required: true
- multivalued: false
inlined: true
axis_1_power_map:
name: axis_1_power_map
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_1_power_map
- required: false
- multivalued: false
inlined: true
axis_2_phase_map:
name: axis_2_phase_map
description: Phase response to stimulus on the second measured axis.
range: ImagingRetinotopy__axis_2_phase_map
required: true
- multivalued: false
inlined: true
axis_2_power_map:
name: axis_2_power_map
description: Power response on the second measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: ImagingRetinotopy__axis_2_power_map
- required: false
- multivalued: false
inlined: true
axis_descriptions:
name: axis_descriptions
@@ -79,16 +73,12 @@ classes:
description: 'Gray-scale image taken with same settings/parameters (e.g.,
focal depth, wavelength) as data collection. Array format: [rows][columns].'
range: ImagingRetinotopy__focal_depth_image
- required: false
- multivalued: false
inlined: true
sign_map:
name: sign_map
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: ImagingRetinotopy__sign_map
- required: false
- multivalued: false
inlined: true
vasculature_image:
name: vasculature_image
@@ -96,7 +86,6 @@ classes:
[rows][columns]'
range: ImagingRetinotopy__vasculature_image
required: true
- multivalued: false
inlined: true
tree_root: true
ImagingRetinotopy__axis_1_phase_map:
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml
index 6ba8106..5f37d89 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
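
Note: the `repr` additions here steer the generator's Python annotations: `numeric` keeps `typeof: float` at the LinkML level but renders as `float | int` in generated models, and the new `dict` type renders as a plain `dict`. A minimal illustration of the effect (hypothetical field name, assuming pydantic v2's smart union validation):

    from pydantic import BaseModel

    class Example(BaseModel):
        count: float | int  # a slot with range `numeric` after this change

    Example(count=3)    # an int is kept as int rather than coerced to 3.0
    Example(count=3.5)  # floats validate as before
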
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml
index 7e1a614..de0d90a 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,11 +177,4 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
index 91de7c2..7493ece 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
index f8adba6..a29024b 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,13 +177,6 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
@@ -196,10 +189,26 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ categories:
+ name: categories
+ description: The names of the categories in this AlignedDynamicTable. Each
+ category is represented by one DynamicTable stored in the parent group.
+ This attribute should be used to specify an order of categories and the
+ category names must match the names of the corresponding DynamicTable in
+ the group.
+ range: text
+ required: true
+ multivalued: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
tree_root: true
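
Note: AlignedDynamicTable now spells out its `name` and the required, multivalued `categories` attribute, whose entries must match the names of the DynamicTables held in `value`. A hedged sketch of that constraint expressed as a pydantic validator (illustrative classes, not the generated ones):

    from pydantic import BaseModel, model_validator

    class DynamicTable(BaseModel):  # stand-in
        name: str

    class AlignedDynamicTable(BaseModel):
        name: str
        categories: list[str]
        value: dict[str, DynamicTable] | None = None

        @model_validator(mode="after")
        def categories_match_tables(self):
            # the schema requires category names to match the contained tables
            if self.value is not None and set(self.categories) != set(self.value):
                raise ValueError("categories must name the contained DynamicTables")
            return self
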
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml
index 4fd80e6..4173cbc 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml
index 52b119d..44a3b23 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,13 +177,6 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
@@ -196,10 +189,26 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ categories:
+ name: categories
+ description: The names of the categories in this AlignedDynamicTable. Each
+ category is represented by one DynamicTable stored in the parent group.
+ This attribute should be used to specify an order of categories and the
+ category names must match the names of the corresponding DynamicTable in
+ the group.
+ range: text
+ required: true
+ multivalued: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml
index beb539c..5b2a0ef 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml
index 85675e7..d53e72f 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,13 +177,6 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
@@ -196,10 +189,26 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ categories:
+ name: categories
+ description: The names of the categories in this AlignedDynamicTable. Each
+ category is represented by one DynamicTable stored in the parent group.
+ This attribute should be used to specify an order of categories and the
+ category names must match the names of the corresponding DynamicTable in
+ the group.
+ range: text
+ required: true
+ multivalued: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml
index f65f22b..652ffad 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml
index 9ffb97d..274356e 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,13 +177,6 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
@@ -196,10 +189,26 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ categories:
+ name: categories
+ description: The names of the categories in this AlignedDynamicTable. Each
+ category is represented by one DynamicTable stored in the parent group.
+ This attribute should be used to specify an order of categories and the
+ category names must match the names of the corresponding DynamicTable in
+ the group.
+ range: text
+ required: true
+ multivalued: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
index ea83af3..3a89816 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
@@ -38,10 +38,16 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: Container
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
index e3d3df3..6b8ce10 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
index 940f1b7..8b73408 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
@@ -116,6 +116,11 @@ classes:
identifier: true
range: string
required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
@@ -123,11 +128,6 @@ classes:
range: DynamicTable
required: true
inlined: true
- description:
- name: description
- description: Description of what this table region points to.
- range: text
- required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -177,13 +177,6 @@ classes:
range: int
required: true
multivalued: false
- vector_data:
- name: vector_data
- description: Vector columns, including index columns, of this dynamic table.
- range: VectorData
- required: false
- multivalued: true
- inlined: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
@@ -196,10 +189,26 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- - name: value
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: DynamicTable
+ name:
+ name: name
+ identifier: true
+ range: string
+ required: true
+ categories:
+ name: categories
+ description: The names of the categories in this AlignedDynamicTable. Each
+ category is represented by one DynamicTable stored in the parent group.
+ This attribute should be used to specify an order of categories and the
+ category names must match the names of the corresponding DynamicTable in
+ the group.
+ range: text
+ required: true
+ multivalued: true
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
tree_root: true
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
index 0a824ca..dcf2549 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
index a962b8f..a8d955d 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
@@ -31,21 +31,18 @@ classes:
resources.
range: ExternalResources__keys
required: true
- multivalued: false
inlined: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__entities
required: true
- multivalued: false
inlined: true
resources:
name: resources
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__resources
required: true
- multivalued: false
inlined: true
objects:
name: objects
@@ -53,14 +50,12 @@ classes:
to external resources.
range: ExternalResources__objects
required: true
- multivalued: false
inlined: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: ExternalResources__object_keys
required: true
- multivalued: false
inlined: true
tree_root: true
ExternalResources__keys:
@@ -84,7 +79,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__entities:
name: ExternalResources__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -104,7 +98,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
resources_idx:
name: resources_idx
description: The index into the 'resources' table
@@ -112,7 +105,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
entity_id:
name: entity_id
description: The unique identifier of the entity.
@@ -120,7 +112,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
entity_uri:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
@@ -129,7 +120,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__resources:
name: ExternalResources__resources
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -149,7 +139,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
resource_uri:
name: resource_uri
description: The URI for the resource. This can be an empty string.
@@ -157,7 +146,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__objects:
name: ExternalResources__objects
description: A table for identifying which objects in a file contain references
@@ -178,7 +166,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
field:
name: field
description: The field of the object. This can be an empty string if the object
@@ -187,7 +174,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__object_keys:
name: ExternalResources__object_keys
description: A table for identifying which objects use which keys.
@@ -208,7 +194,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The index to the 'keys' table for the key.
@@ -216,4 +201,3 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
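
Note: the dominant change in these hdmf-experimental hunks is dropping `multivalued: false` from scalar slots; single-valued is the LinkML default, so the generated annotations are unchanged and the YAML just gets quieter. For contrast, a tiny sketch of what the flag controls in generated models (illustrative field names):

    from pydantic import BaseModel

    class Keys(BaseModel):
        key: str             # required, no `multivalued` -> a single value

    class Columns(BaseModel):
        colnames: list[str]  # `multivalued: true` -> a list
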
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml
index 0a824ca..dcf2549 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml
index 89023ae..4aadb91 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml
@@ -31,21 +31,18 @@ classes:
resources.
range: ExternalResources__keys
required: true
- multivalued: false
inlined: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__entities
required: true
- multivalued: false
inlined: true
resources:
name: resources
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__resources
required: true
- multivalued: false
inlined: true
objects:
name: objects
@@ -53,14 +50,12 @@ classes:
to external resources.
range: ExternalResources__objects
required: true
- multivalued: false
inlined: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: ExternalResources__object_keys
required: true
- multivalued: false
inlined: true
tree_root: true
ExternalResources__keys:
@@ -84,7 +79,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__entities:
name: ExternalResources__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -104,7 +98,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
resources_idx:
name: resources_idx
description: The index into the 'resources' table
@@ -112,7 +105,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
entity_id:
name: entity_id
description: The unique identifier of the entity.
@@ -120,7 +112,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
entity_uri:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
@@ -129,7 +120,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__resources:
name: ExternalResources__resources
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -149,7 +139,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
resource_uri:
name: resource_uri
description: The URI for the resource. This can be an empty string.
@@ -157,7 +146,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__objects:
name: ExternalResources__objects
description: A table for identifying which objects in a file contain references
@@ -178,7 +166,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
relative_path:
name: relative_path
description: The relative path from the container with the object_id to the
@@ -189,7 +176,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
field:
name: field
description: The field of the compound data type using an external resource.
@@ -199,7 +185,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__object_keys:
name: ExternalResources__object_keys
description: A table for identifying which objects use which keys.
@@ -220,7 +205,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The index to the 'keys' table for the key.
@@ -228,4 +212,3 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml
index 0a824ca..dcf2549 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml
index d0909a2..52d014f 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml
@@ -31,21 +31,18 @@ classes:
resources.
range: ExternalResources__keys
required: true
- multivalued: false
inlined: true
files:
name: files
description: A table for storing object ids of files used in external resources.
range: ExternalResources__files
required: true
- multivalued: false
inlined: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__entities
required: true
- multivalued: false
inlined: true
objects:
name: objects
@@ -53,14 +50,12 @@ classes:
to external resources.
range: ExternalResources__objects
required: true
- multivalued: false
inlined: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: ExternalResources__object_keys
required: true
- multivalued: false
inlined: true
tree_root: true
ExternalResources__keys:
@@ -84,7 +79,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__files:
name: ExternalResources__files
description: A table for storing object ids of files used in external resources.
@@ -105,7 +99,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__entities:
name: ExternalResources__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -125,7 +118,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
entity_id:
name: entity_id
description: The compact uniform resource identifier (CURIE) of the entity,
@@ -134,7 +126,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
entity_uri:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
@@ -143,7 +134,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__objects:
name: ExternalResources__objects
description: A table for identifying which objects in a file contain references
@@ -165,7 +155,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
object_id:
name: object_id
description: The object id (UUID) of the object.
@@ -173,7 +162,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
object_type:
name: object_type
description: The data type of the object.
@@ -181,7 +169,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
relative_path:
name: relative_path
description: The relative path from the data object with the `object_id` to
@@ -192,7 +179,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
field:
name: field
description: The field within the compound data type using an external resource.
@@ -202,7 +188,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__object_keys:
name: ExternalResources__object_keys
description: A table for identifying which objects use which keys.
@@ -223,7 +208,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
@@ -231,4 +215,3 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml
index 0a824ca..dcf2549 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml
index 75f3938..bc36101 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml
@@ -31,21 +31,18 @@ classes:
resources.
range: ExternalResources__keys
required: true
- multivalued: false
inlined: true
files:
name: files
description: A table for storing object ids of files used in external resources.
range: ExternalResources__files
required: true
- multivalued: false
inlined: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__entities
required: true
- multivalued: false
inlined: true
objects:
name: objects
@@ -53,21 +50,18 @@ classes:
to external resources.
range: ExternalResources__objects
required: true
- multivalued: false
inlined: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: ExternalResources__object_keys
required: true
- multivalued: false
inlined: true
entity_keys:
name: entity_keys
description: A table for identifying which keys use which entity.
range: ExternalResources__entity_keys
required: true
- multivalued: false
inlined: true
tree_root: true
ExternalResources__keys:
@@ -91,7 +85,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__files:
name: ExternalResources__files
description: A table for storing object ids of files used in external resources.
@@ -112,7 +105,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__entities:
name: ExternalResources__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -133,7 +125,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
entity_uri:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
@@ -142,7 +133,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__objects:
name: ExternalResources__objects
description: A table for identifying which objects in a file contain references
@@ -164,7 +154,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
object_id:
name: object_id
description: The object id (UUID) of the object.
@@ -172,7 +161,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
object_type:
name: object_type
description: The data type of the object.
@@ -180,7 +168,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
relative_path:
name: relative_path
description: The relative path from the data object with the `object_id` to
@@ -191,7 +178,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
field:
name: field
description: The field within the compound data type using an external resource.
@@ -201,7 +187,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
ExternalResources__object_keys:
name: ExternalResources__object_keys
description: A table for identifying which objects use which keys.
@@ -222,7 +207,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
@@ -230,7 +214,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
ExternalResources__entity_keys:
name: ExternalResources__entity_keys
description: A table for identifying which keys use which entity.
@@ -250,7 +233,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
@@ -258,4 +240,3 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
index 0a824ca..dcf2549 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
@@ -66,6 +66,7 @@ types:
numeric:
name: numeric
typeof: float
+ repr: float | int
text:
name: text
typeof: string
@@ -87,6 +88,9 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
+ dict:
+ name: dict
+ repr: dict
classes:
AnyType:
name: AnyType
diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
index dcaf960..6dfe7fe 100644
--- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
+++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
@@ -30,21 +30,18 @@ classes:
resources.
range: HERD__keys
required: true
- multivalued: false
inlined: true
files:
name: files
description: A table for storing object ids of files used in external resources.
range: HERD__files
required: true
- multivalued: false
inlined: true
entities:
name: entities
description: A table for mapping user terms (i.e., keys) to resource entities.
range: HERD__entities
required: true
- multivalued: false
inlined: true
objects:
name: objects
@@ -52,21 +49,18 @@ classes:
to external resources.
range: HERD__objects
required: true
- multivalued: false
inlined: true
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: HERD__object_keys
required: true
- multivalued: false
inlined: true
entity_keys:
name: entity_keys
description: A table for identifying which keys use which entity.
range: HERD__entity_keys
required: true
- multivalued: false
inlined: true
tree_root: true
HERD__keys:
@@ -90,7 +84,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
HERD__files:
name: HERD__files
description: A table for storing object ids of files used in external resources.
@@ -111,7 +104,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
HERD__entities:
name: HERD__entities
description: A table for mapping user terms (i.e., keys) to resource entities.
@@ -132,7 +124,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
entity_uri:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
@@ -141,7 +132,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
HERD__objects:
name: HERD__objects
description: A table for identifying which objects in a file contain references
@@ -163,7 +153,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
object_id:
name: object_id
description: The object id (UUID) of the object.
@@ -171,7 +160,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
object_type:
name: object_type
description: The data type of the object.
@@ -179,7 +167,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
relative_path:
name: relative_path
description: The relative path from the data object with the `object_id` to
@@ -190,7 +177,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
field:
name: field
description: The field within the compound data type using an external resource.
@@ -200,7 +186,6 @@ classes:
exact_number_dimensions: 1
range: text
required: true
- multivalued: false
HERD__object_keys:
name: HERD__object_keys
description: A table for identifying which objects use which keys.
@@ -221,7 +206,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
@@ -229,7 +213,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
HERD__entity_keys:
name: HERD__entity_keys
description: A table for identifying which keys use which entity.
@@ -249,7 +232,6 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
@@ -257,4 +239,3 @@ classes:
exact_number_dimensions: 1
range: uint
required: true
- multivalued: false
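These hunks drop `multivalued: false` from every scalar slot: in the LinkML metamodel, `multivalued` is unset by default, so the key was redundant next to `exact_number_dimensions: 1`. A minimal sketch of that default, assuming `linkml-runtime` is installed:

```python
# minimal sketch: `multivalued` is unset (None) on a fresh SlotDefinition,
# so omitting the key in YAML leaves the slot single-valued
from linkml_runtime.linkml_model.meta import SlotDefinition

slot = SlotDefinition(name="keys_idx", range="uint", required=True)
print(slot.multivalued)  # None -- generators treat this as not multivalued
```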
diff --git a/nwb_schema_language/Makefile b/nwb_schema_language/Makefile
index 9d6f45f..2f8cd76 100644
--- a/nwb_schema_language/Makefile
+++ b/nwb_schema_language/Makefile
@@ -6,7 +6,7 @@ SHELL := bash
.SUFFIXES:
.SECONDARY:
-RUN = poetry run
+RUN = pdm run
# get values from about.yaml file
SCHEMA_NAME = $(shell ${SHELL} ./utils/get-value.sh name)
SOURCE_SCHEMA_PATH = $(shell ${SHELL} ./utils/get-value.sh source_schema_path)
@@ -107,7 +107,7 @@ gen-project: $(PYMODEL)
$(RUN) gen-project ${GEN_PARGS} -d $(DEST) $(SOURCE_SCHEMA_PATH) && mv $(DEST)/*.py $(PYMODEL)
gen-pydantic: $(PYMODEL)
- $(RUN) gen-pydantic $(SOURCE_SCHEMA_PATH) --pydantic_version 2 > $(PYMODEL)/nwb_schema_pydantic.py
+ $(RUN) generate_pydantic
$(RUN) run_patches --phase post_generation_pydantic
test: test-schema test-python test-examples
diff --git a/nwb_schema_language/pyproject.toml b/nwb_schema_language/pyproject.toml
index 1a59159..b912c77 100644
--- a/nwb_schema_language/pyproject.toml
+++ b/nwb_schema_language/pyproject.toml
@@ -9,7 +9,7 @@ dependencies = [
"linkml-runtime>=1.7.7",
"pydantic>=2.3.0",
]
-version = "0.1.3"
+version = "0.2.0"
description = "Translation of the nwb-schema-language to LinkML"
readme = "README.md"
@@ -20,6 +20,7 @@ documentation = "https://nwb-linkml.readthedocs.io"
[project.scripts]
run_patches = "nwb_schema_language.patches:main"
+generate_pydantic = "nwb_schema_language.generator:generate"
[tool.pdm]
[tool.pdm.dev-dependencies]
diff --git a/nwb_schema_language/src/nwb_schema_language/__init__.py b/nwb_schema_language/src/nwb_schema_language/__init__.py
index 653b6ff..d211475 100644
--- a/nwb_schema_language/src/nwb_schema_language/__init__.py
+++ b/nwb_schema_language/src/nwb_schema_language/__init__.py
@@ -22,10 +22,10 @@ try:
DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype]
-except (NameError, RecursionError):
+except (NameError, RecursionError) as e:
warnings.warn(
"Error importing pydantic classes, passing because we might be in the process of patching"
- " them, but it is likely they are broken and you will be unable to use them!",
+ f" them, but it is likely they are broken and you will be unable to use them!\n{e}",
stacklevel=1,
)
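The warning above now carries the caught exception's text, so a broken import is diagnosable instead of silent. A self-contained sketch of the pattern:

```python
# sketch of the warning pattern: surface the original exception's message
import warnings

try:
    raise NameError("name 'Dataset' is not defined")  # stand-in for a failed import
except (NameError, RecursionError) as e:
    warnings.warn(f"Error importing pydantic classes: {e}", stacklevel=1)
```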
diff --git a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
index 84132d0..83f084c 100644
--- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
+++ b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
@@ -1,14 +1,14 @@
from __future__ import annotations
-from datetime import datetime, date
-from enum import Enum
-from typing import List, Dict, Optional, Any, Union
-from pydantic import BaseModel as BaseModel, Field
-import sys
-if sys.version_info >= (3, 8):
- from typing import Literal
-else:
- from typing_extensions import Literal
+import re
+import sys
+from datetime import date, datetime, time
+from decimal import Decimal
+from enum import Enum
+from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
+
+from nwb_schema_language.util import pformat
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
@@ -16,11 +16,90 @@ version = "None"
class ConfiguredBaseModel(BaseModel):
- pass
+ model_config = ConfigDict(
+ validate_assignment=False,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+
+ def __repr__(self):
+ return pformat(self.model_dump(exclude={"parent": True}), self.__class__.__name__)
+
+ def __str__(self):
+ return repr(self)
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+class ParentizeMixin(BaseModel):
+ """Mixin to populate the parent field for nested datasets and groups"""
+
+ @model_validator(mode="after")
+ def parentize(self) -> BaseModel:
+ """Set the parent attribute for all our fields they have one"""
+ for field_name in self.model_fields:
+ if field_name == "parent":
+ continue
+ field = getattr(self, field_name)
+ if not isinstance(field, list):
+ field = [field]
+ for item in field:
+ if hasattr(item, "parent"):
+ item.parent = self
+
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "default_prefix": "nwb_schema_language",
+ "default_range": "string",
+ "description": "Translation of the nwb-schema-language to LinkML",
+ "id": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "imports": ["linkml:types"],
+ "license": "GNU GPL v3.0",
+ "name": "nwb-schema-language",
+ "prefixes": {
+ "linkml": {"prefix_prefix": "linkml", "prefix_reference": "https://w3id.org/linkml/"},
+ "nwb_schema_language": {
+ "prefix_prefix": "nwb_schema_language",
+ "prefix_reference": "https://w3id.org/p2p_ld/nwb-schema-language/",
+ },
+ "schema": {"prefix_prefix": "schema", "prefix_reference": "http://schema.org/"},
+ },
+ "see_also": ["https://p2p_ld.github.io/nwb-schema-language"],
+ "settings": {
+ "email": {"setting_key": "email", "setting_value": "\\S+@\\S+{\\.\\w}+"},
+ "protected_string": {
+ "setting_key": "protected_string",
+ "setting_value": "^[A-Za-z_][A-Za-z0-9_]*$",
+ },
+ },
+ "source_file": "/Users/jonny/git/p2p-ld/nwb-linkml/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml",
+ "title": "nwb-schema-language",
+ }
+)
class ReftypeOptions(str, Enum):
-
# Reference to another group or dataset of the given target_type
ref = "ref"
# Reference to another group or dataset of the given target_type
@@ -32,7 +111,6 @@ class ReftypeOptions(str, Enum):
class QuantityEnum(str, Enum):
-
# Zero or more instances, equivalent to zero_or_many
ASTERISK = "*"
# Zero or one instances, equivalent to zero_or_one
@@ -48,7 +126,6 @@ class QuantityEnum(str, Enum):
class FlatDtype(str, Enum):
-
# single precision floating point (32 bit)
float = "float"
# single precision floating point (32 bit)
@@ -100,164 +177,842 @@ class FlatDtype(str, Enum):
class Namespace(ConfiguredBaseModel):
-
- doc: str = Field(..., description="""Description of corresponding object.""")
- name: str = Field(...)
- full_name: Optional[str] = Field(
- None, description="""Optional string with extended full name for the namespace."""
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {
+ "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "slot_usage": {"name": {"name": "name", "required": True}},
+ }
+ )
+
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
+ )
+ name: str = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ full_name: Optional[str] = Field(
+ None,
+ description="""Optional string with extended full name for the namespace.""",
+ json_schema_extra={"linkml_meta": {"alias": "full_name", "domain_of": ["Namespace"]}},
+ )
+ version: str = Field(
+ ..., json_schema_extra={"linkml_meta": {"alias": "version", "domain_of": ["Namespace"]}}
)
- version: str = Field(...)
date: Optional[datetime] = Field(
- None, description="""Date that a namespace was last modified or released"""
+ None,
+ description="""Date that a namespace was last modified or released""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "date",
+ "domain_of": ["Namespace"],
+ "examples": [{"value": "2017-04-25 17:14:13"}],
+ "slot_uri": "schema:dateModified",
+ }
+ },
)
author: List[str] | str = Field(
- default_factory=list,
+ ...,
description="""List of strings with the names of the authors of the namespace.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "author",
+ "domain_of": ["Namespace"],
+ "slot_uri": "schema:author",
+ }
+ },
)
contact: List[str] | str = Field(
- default_factory=list,
+ ...,
description="""List of strings with the contact information for the authors. Ordering of the contacts should match the ordering of the authors.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "contact",
+ "domain_of": ["Namespace"],
+ "slot_uri": "schema:email",
+ "structured_pattern": {"interpolated": True, "syntax": "{email}"},
+ }
+ },
)
schema_: Optional[List[Schema]] = Field(
+ None,
alias="schema",
- default_factory=list,
description="""List of the schema to be included in this namespace.""",
+ json_schema_extra={"linkml_meta": {"alias": "schema_", "domain_of": ["Namespace"]}},
)
class Namespaces(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
- namespaces: Optional[List[Namespace]] = Field(default_factory=list)
+ namespaces: Optional[List[Namespace]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"alias": "namespaces", "domain_of": ["Namespaces"]}},
+ )
class Schema(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {
+ "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "rules": [
+ {
+ "description": "If namespace is absent, source is required",
+ "postconditions": {
+ "slot_conditions": {"source": {"name": "source", "required": True}}
+ },
+ "preconditions": {
+ "slot_conditions": {
+ "namespace": {"name": "namespace", "value_presence": "ABSENT"}
+ }
+ },
+ },
+ {
+ "description": "If source is absent, namespace is required.",
+ "postconditions": {
+ "slot_conditions": {"namespace": {"name": "namespace", "required": True}}
+ },
+ "preconditions": {
+ "slot_conditions": {
+ "source": {"name": "source", "value_presence": "ABSENT"}
+ }
+ },
+ },
+ {
+ "description": "If namespace is present, source is cannot be",
+ "postconditions": {
+ "slot_conditions": {
+ "source": {"name": "source", "value_presence": "ABSENT"}
+ }
+ },
+ "preconditions": {
+ "slot_conditions": {
+ "namespace": {"name": "namespace", "value_presence": "PRESENT"}
+ }
+ },
+ },
+ {
+ "description": "If source is present, namespace cannot be.",
+ "postconditions": {
+ "slot_conditions": {
+ "namespace": {"name": "namespace", "value_presence": "ABSENT"}
+ }
+ },
+ "preconditions": {
+ "slot_conditions": {
+ "source": {"name": "source", "value_presence": "PRESENT"}
+ }
+ },
+ },
+ ],
+ }
+ )
source: Optional[str] = Field(
None,
description="""describes the name of the YAML (or JSON) file with the schema specification. The schema files should be located in the same folder as the namespace file.""",
+ json_schema_extra={"linkml_meta": {"alias": "source", "domain_of": ["Schema"]}},
)
namespace: Optional[str] = Field(
None,
description="""describes a named reference to another namespace. In contrast to source, this is a reference by name to a known namespace (i.e., the namespace is resolved during the build and must point to an already existing namespace). This mechanism is used to allow, e.g., extension of a core namespace (here the NWB core namespace) without requiring hard paths to the files describing the core namespace. Either source or namespace must be specified, but not both.""",
+ json_schema_extra={"linkml_meta": {"alias": "namespace", "domain_of": ["Schema"]}},
)
title: Optional[str] = Field(
- None, description="""a descriptive title for a file for documentation purposes."""
+ None,
+ description="""a descriptive title for a file for documentation purposes.""",
+ json_schema_extra={"linkml_meta": {"alias": "title", "domain_of": ["Schema"]}},
)
neurodata_types: Optional[List[Union[Dataset, Group]]] = Field(
- default_factory=list,
+ None,
description="""an optional list of strings indicating which data types should be included from the given specification source or namespace. The default is null indicating that all data types should be included.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "neurodata_types",
+ "any_of": [{"range": "Dataset"}, {"range": "Group"}],
+ "domain_of": ["Schema"],
+ }
+ },
+ )
+ doc: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
)
- doc: Optional[str] = Field(None)
-class Group(ConfiguredBaseModel):
+class Group(ConfiguredBaseModel, ParentizeMixin):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
neurodata_type_def: Optional[str] = Field(
None,
description="""Used alongside neurodata_type_inc to indicate inheritance, naming, and mixins""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "neurodata_type_def",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
)
neurodata_type_inc: Optional[str] = Field(
None,
description="""Used alongside neurodata_type_def to indicate inheritance, naming, and mixins""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "neurodata_type_inc",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ name: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ default_name: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "default_name",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
+ )
+ quantity: Optional[Union[QuantityEnum, int]] = Field(
+ "1",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "quantity",
+ "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}],
+ "domain_of": ["Group", "Link", "Dataset"],
+ "ifabsent": "int(1)",
+ "todos": [
+ "logic to check that the corresponding class can only be "
+ "implemented quantity times."
+ ],
+ }
+ },
+ )
+ linkable: Optional[bool] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}},
+ )
+ attributes: Optional[List[Attribute]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}
+ },
+ )
+ datasets: Optional[List[Dataset]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}
+ },
+ )
+ groups: Optional[List[Group]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}},
+ )
+ links: Optional[List[Link]] = Field(
+ None, json_schema_extra={"linkml_meta": {"alias": "links", "domain_of": ["Group"]}}
+ )
+ parent: Optional[Group] = Field(
+ None,
+ exclude=True,
+ description="""The parent group that contains this dataset or group""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}
+ },
)
- name: Optional[str] = Field(None)
- default_name: Optional[str] = Field(None)
- doc: str = Field(..., description="""Description of corresponding object.""")
- quantity: Optional[Union[QuantityEnum, int]] = Field(1)
- linkable: Optional[bool] = Field(None)
- attributes: Optional[List[Attribute]] = Field(default_factory=list)
- datasets: Optional[List[Dataset]] = Field(default_factory=list)
- groups: Optional[List[Group]] = Field(default_factory=list)
- links: Optional[List[Link]] = Field(default_factory=list)
class Groups(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
- groups: Optional[List[Group]] = Field(default_factory=list)
+ groups: Optional[List[Group]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}},
+ )
class Link(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
- name: Optional[str] = Field(None)
- doc: str = Field(..., description="""Description of corresponding object.""")
+ name: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
+ )
target_type: str = Field(
...,
description="""Describes the neurodata_type of the target that the reference points to""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]}
+ },
+ )
+ quantity: Optional[Union[QuantityEnum, int]] = Field(
+ "1",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "quantity",
+ "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}],
+ "domain_of": ["Group", "Link", "Dataset"],
+ "ifabsent": "int(1)",
+ "todos": [
+ "logic to check that the corresponding class can only be "
+ "implemented quantity times."
+ ],
+ }
+ },
)
- quantity: Optional[Union[QuantityEnum, int]] = Field(1)
class Datasets(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
- datasets: Optional[List[Dataset]] = Field(default_factory=list)
+ datasets: Optional[List[Dataset]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}
+ },
+ )
class ReferenceDtype(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}
+ )
target_type: str = Field(
...,
description="""Describes the neurodata_type of the target that the reference points to""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]}
+ },
)
reftype: Optional[ReftypeOptions] = Field(
- None, description="""describes the kind of reference"""
+ None,
+ description="""describes the kind of reference""",
+ json_schema_extra={"linkml_meta": {"alias": "reftype", "domain_of": ["ReferenceDtype"]}},
)
class CompoundDtype(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {
+ "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "slot_usage": {
+ "dtype": {
+ "any_of": [{"range": "ReferenceDtype"}, {"range": "FlatDtype"}],
+ "multivalued": False,
+ "name": "dtype",
+ "required": True,
+ },
+ "name": {"name": "name", "required": True},
+ },
+ }
+ )
- name: str = Field(...)
- doc: str = Field(..., description="""Description of corresponding object.""")
- dtype: Union[FlatDtype, ReferenceDtype] = Field(...)
+ name: str = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
+ )
+ dtype: Union[FlatDtype, ReferenceDtype] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dtype",
+ "any_of": [{"range": "ReferenceDtype"}, {"range": "FlatDtype"}],
+ "domain_of": ["CompoundDtype", "DtypeMixin"],
+ }
+ },
+ )
class DtypeMixin(ConfiguredBaseModel):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {
+ "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "mixin": True,
+ "rules": [
+ {
+ "postconditions": {
+ "slot_conditions": {"dtype": {"multivalued": False, "name": "dtype"}}
+ },
+ "preconditions": {
+ "slot_conditions": {"dtype": {"name": "dtype", "range": "FlatDtype"}}
+ },
+ }
+ ],
+ }
+ )
dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
- default_factory=list
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dtype",
+ "any_of": [
+ {"range": "FlatDtype"},
+ {"range": "CompoundDtype"},
+ {"range": "ReferenceDtype"},
+ ],
+ "domain_of": ["CompoundDtype", "DtypeMixin"],
+ }
+ },
)
class Attribute(DtypeMixin):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {
+ "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language",
+ "mixins": ["DtypeMixin"],
+ "slot_usage": {
+ "name": {"name": "name", "required": True},
+ "parent": {"any_of": [{"range": "Group"}, {"range": "Dataset"}], "name": "parent"},
+ },
+ }
+ )
- name: str = Field(...)
- dims: Optional[List[Union[Any, str]]] = Field(None)
- shape: Optional[List[Union[Any, int, str]]] = Field(None)
+ name: str = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ dims: Optional[List[Union[Any, str]]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dims",
+ "any_of": [{"range": "string"}, {"range": "AnyType"}],
+ "domain_of": ["Attribute", "Dataset"],
+ "todos": [
+ "Can't quite figure out how to allow an array of arrays - see "
+ "https://github.com/linkml/linkml/issues/895"
+ ],
+ }
+ },
+ )
+ shape: Optional[List[Union[Any, int, str]]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "shape",
+ "any_of": [
+ {"minimum_value": 1, "range": "integer"},
+ {"equals_string": "null", "range": "string"},
+ {"range": "AnyType"},
+ ],
+ "domain_of": ["Attribute", "Dataset"],
+ "todos": [
+ "Can't quite figure out how to allow an array of arrays - see "
+ "https://github.com/linkml/linkml/issues/895"
+ ],
+ }
+ },
+ )
value: Optional[Any] = Field(
- None, description="""Optional constant, fixed value for the attribute."""
+ None,
+ description="""Optional constant, fixed value for the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]}
+ },
)
default_value: Optional[Any] = Field(
- None, description="""Optional default value for variable-valued attributes."""
+ None,
+ description="""Optional default value for variable-valued attributes.""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}
+ },
+ )
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
)
- doc: str = Field(..., description="""Description of corresponding object.""")
required: Optional[bool] = Field(
True,
description="""Optional boolean key describing whether the attribute is required. Default value is True.""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "required", "domain_of": ["Attribute"], "ifabsent": "true"}
+ },
+ )
+ parent: Optional[Union[Dataset, Group]] = Field(
+ None,
+ exclude=True,
+ description="""The parent group that contains this dataset or group""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "parent",
+ "any_of": [{"range": "Group"}, {"range": "Dataset"}],
+ "domain_of": ["Group", "Attribute", "Dataset"],
+ }
+ },
+ )
+ dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dtype",
+ "any_of": [
+ {"range": "FlatDtype"},
+ {"range": "CompoundDtype"},
+ {"range": "ReferenceDtype"},
+ ],
+ "domain_of": ["CompoundDtype", "DtypeMixin"],
+ }
+ },
)
- dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None)
-class Dataset(DtypeMixin):
+class Dataset(ConfiguredBaseModel, ParentizeMixin):
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", "mixins": ["DtypeMixin"]}
+ )
neurodata_type_def: Optional[str] = Field(
None,
description="""Used alongside neurodata_type_inc to indicate inheritance, naming, and mixins""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "neurodata_type_def",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
)
neurodata_type_inc: Optional[str] = Field(
None,
description="""Used alongside neurodata_type_def to indicate inheritance, naming, and mixins""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "neurodata_type_inc",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ name: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "name",
+ "domain_of": [
+ "Namespace",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ default_name: Optional[str] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "default_name",
+ "domain_of": ["Group", "Dataset"],
+ "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"},
+ }
+ },
+ )
+ dims: Optional[List[Union[Any, str]]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dims",
+ "any_of": [{"range": "string"}, {"range": "AnyType"}],
+ "domain_of": ["Attribute", "Dataset"],
+ "todos": [
+ "Can't quite figure out how to allow an array of arrays - see "
+ "https://github.com/linkml/linkml/issues/895"
+ ],
+ }
+ },
+ )
+ shape: Optional[List[Union[Any, int, str]]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "shape",
+ "any_of": [
+ {"minimum_value": 1, "range": "integer"},
+ {"equals_string": "null", "range": "string"},
+ {"range": "AnyType"},
+ ],
+ "domain_of": ["Attribute", "Dataset"],
+ "todos": [
+ "Can't quite figure out how to allow an array of arrays - see "
+ "https://github.com/linkml/linkml/issues/895"
+ ],
+ }
+ },
)
- name: Optional[str] = Field(None)
- default_name: Optional[str] = Field(None)
- dims: Optional[List[Union[Any, str]]] = Field(None)
- shape: Optional[List[Union[Any, int, str]]] = Field(None)
value: Optional[Any] = Field(
- None, description="""Optional constant, fixed value for the attribute."""
+ None,
+ description="""Optional constant, fixed value for the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]}
+ },
)
default_value: Optional[Any] = Field(
- None, description="""Optional default value for variable-valued attributes."""
+ None,
+ description="""Optional default value for variable-valued attributes.""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}
+ },
)
- doc: str = Field(..., description="""Description of corresponding object.""")
- quantity: Optional[Union[QuantityEnum, int]] = Field(1)
- linkable: Optional[bool] = Field(None)
- attributes: Optional[List[Attribute]] = Field(None)
- dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None)
+ doc: str = Field(
+ ...,
+ description="""Description of corresponding object.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "doc",
+ "domain_of": [
+ "Namespace",
+ "Schema",
+ "Group",
+ "Attribute",
+ "Link",
+ "Dataset",
+ "CompoundDtype",
+ ],
+ }
+ },
+ )
+ quantity: Optional[Union[QuantityEnum, int]] = Field(
+ "1",
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "quantity",
+ "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}],
+ "domain_of": ["Group", "Link", "Dataset"],
+ "ifabsent": "int(1)",
+ "todos": [
+ "logic to check that the corresponding class can only be "
+ "implemented quantity times."
+ ],
+ }
+ },
+ )
+ linkable: Optional[bool] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}},
+ )
+ attributes: Optional[List[Attribute]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}
+ },
+ )
+ parent: Optional[Group] = Field(
+ None,
+ exclude=True,
+ description="""The parent group that contains this dataset or group""",
+ json_schema_extra={
+ "linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}
+ },
+ )
+ dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "alias": "dtype",
+ "any_of": [
+ {"range": "FlatDtype"},
+ {"range": "CompoundDtype"},
+ {"range": "ReferenceDtype"},
+ ],
+ "domain_of": ["CompoundDtype", "DtypeMixin"],
+ }
+ },
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Namespace.model_rebuild()
+Namespaces.model_rebuild()
+Schema.model_rebuild()
+Group.model_rebuild()
+Groups.model_rebuild()
+Link.model_rebuild()
+Datasets.model_rebuild()
+ReferenceDtype.model_rebuild()
+CompoundDtype.model_rebuild()
+DtypeMixin.model_rebuild()
+Attribute.model_rebuild()
+Dataset.model_rebuild()
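The regenerated module now ends with explicit `model_rebuild()` calls. With `from __future__ import annotations`, self-referential fields such as `Group.groups: Optional[List[Group]]` are stored as forward references, and rebuilding resolves them before first validation. A toy sketch of the same idea:

```python
# toy sketch of why the generated module calls model_rebuild(): with deferred
# annotations, a self-referential model holds a forward reference to itself
from __future__ import annotations

from typing import List, Optional

from pydantic import BaseModel


class Node(BaseModel):
    name: str
    children: Optional[List[Node]] = None


Node.model_rebuild()  # resolve the "Node" forward reference
print(Node(name="root", children=[{"name": "leaf"}]))
```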
diff --git a/nwb_schema_language/src/nwb_schema_language/generator.py b/nwb_schema_language/src/nwb_schema_language/generator.py
new file mode 100644
index 0000000..7b0d289
--- /dev/null
+++ b/nwb_schema_language/src/nwb_schema_language/generator.py
@@ -0,0 +1,84 @@
+"""
+Customization of linkml pydantic generator
+"""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+from linkml.generators.pydanticgen import PydanticGenerator
+from linkml.generators.pydanticgen.build import ClassResult
+from linkml.generators.pydanticgen.template import Import, ObjectImport
+from linkml_runtime import SchemaView
+from pydantic import BaseModel, model_validator
+
+
+class ParentizeMixin(BaseModel):
+ """Mixin to populate the parent field for nested datasets and groups"""
+
+ @model_validator(mode="after")
+ def parentize(self) -> BaseModel:
+ """Set the parent attribute for all our fields they have one"""
+ for field_name in self.model_fields:
+ if field_name == "parent":
+ continue
+ field = getattr(self, field_name)
+ if not isinstance(field, list):
+ field = [field]
+ for item in field:
+ if hasattr(item, "parent"):
+ item.parent = self
+
+ return self
+
+
+STR_METHOD = """
+ def __repr__(self):
+ return pformat(
+ self.model_dump(
+ exclude={"parent": True},
+ exclude_unset=True,
+ exclude_none=True
+ ),
+ self.__class__.__name__
+ )
+
+ def __str__(self):
+ return repr(self)
+"""
+
+
+@dataclass
+class NWBSchemaLangGenerator(PydanticGenerator):
+ """
+ Customization of linkml pydantic generator
+ """
+
+ def __init__(self, *args, **kwargs):
+ kwargs["injected_classes"] = [ParentizeMixin]
+ kwargs["imports"] = [
+ Import(module="pydantic", objects=[ObjectImport(name="model_validator")]),
+ Import(module="nwb_schema_language.util", objects=[ObjectImport(name="pformat")]),
+ ]
+ kwargs["injected_fields"] = [STR_METHOD]
+ kwargs["black"] = True
+ super().__init__(*args, **kwargs)
+
+ def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult:
+ """
+ Add the ParentizeMixin to the bases of Dataset and Group
+ """
+ if cls.cls.name in ("Dataset", "Group"):
+ cls.cls.bases = ["ConfiguredBaseModel", "ParentizeMixin"]
+ return cls
+
+
+def generate() -> None:
+ """
+ Generate pydantic models for nwb_schema_language
+ """
+ schema = Path(__file__).parent / "schema" / "nwb_schema_language.yaml"
+ output = Path(__file__).parent / "datamodel" / "nwb_schema_pydantic.py"
+ generator = NWBSchemaLangGenerator(schema=schema)
+ generated = generator.serialize()
+ with open(output, "w") as ofile:
+ ofile.write(generated)
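Together with the `generate_pydantic` entry point registered in `pyproject.toml` above, regeneration becomes a one-liner. Roughly equivalent, as a sketch:

```python
# roughly what `pdm run generate_pydantic` does: regenerate the pydantic
# models from the bundled schema (paths assume the installed package layout)
from nwb_schema_language.generator import generate

generate()  # writes datamodel/nwb_schema_pydantic.py inside the package
```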
diff --git a/nwb_schema_language/src/nwb_schema_language/patches.py b/nwb_schema_language/src/nwb_schema_language/patches.py
index 1b2c9a5..f6fa5b1 100644
--- a/nwb_schema_language/src/nwb_schema_language/patches.py
+++ b/nwb_schema_language/src/nwb_schema_language/patches.py
@@ -49,8 +49,15 @@ class Patch:
patch_schema_slot = Patch(
phase=Phases.post_generation_pydantic,
path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"),
- match=r"\n\s*(schema:)(.*Field\()(.*)",
- replacement=r'\n schema_:\2alias="schema", \3',
+ match=r"\n\s*(schema:)(.*Field\(\n\s*None,\n)(.*)",
+ replacement=r'\n schema_:\2 alias="schema",\n\3',
+)
+
+patch_schema_slot_no_newline = Patch(
+ phase=Phases.post_generation_pydantic,
+ path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"),
+ match=r"\n\s*(schema:)(.*Field\(None,)(.*)",
+ replacement=r'\n schema_:\2 alias="schema", \3',
)
patch_dtype_single_multiple = Patch(
@@ -74,6 +81,20 @@ patch_contact_single_multiple = Patch(
replacement="contact: List[str] | str",
)
+patch_validate_assignment = Patch(
+ phase=Phases.post_generation_pydantic,
+ path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"),
+ match=r"validate_assignment=True",
+ replacement="validate_assignment=False",
+)
+
+patch_exclude_parent = Patch(
+ phase=Phases.post_generation_pydantic,
+ path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"),
+ match=r"(parent:.*Field\(\n\s*None,\n)(.*)",
+ replacement=r"\1 exclude=True,\n\2",
+)
+
def run_patches(phase: Phases, verbose: bool = False) -> None:
"""
diff --git a/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml b/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml
index ff06a56..00c9aa9 100644
--- a/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml
+++ b/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml
@@ -78,6 +78,7 @@ classes:
- datasets
- groups
- links
+ - parent
Groups:
slots:
@@ -94,9 +95,14 @@ classes:
- default_value
- doc
- required
+ - parent
slot_usage:
name:
required: true
+ parent:
+ any_of:
+ - range: Group
+ - range: Dataset
Link:
slots:
@@ -121,6 +127,7 @@ classes:
- quantity
- linkable
- attributes
+ - parent
Datasets:
slots:
@@ -177,7 +184,7 @@ slots:
description: Optional string with extended full name for the namespace.
version:
required: true
- pattern: "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"
+# pattern: "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"
date:
range: datetime
slot_uri: schema:dateModified
@@ -207,7 +214,6 @@ slots:
# schema
source:
description: describes the name of the YAML (or JSON) file with the schema specification. The schema files should be located in the same folder as the namespace file.
- pattern: ".*\\.(yml|yaml|json)"
namespace:
description: describes a named reference to another namespace. In contrast to source, this is a reference by name to a known namespace (i.e., the namespace is resolved during the build and must point to an already existing namespace). This mechanism is used to allow, e.g., extension of a core namespace (here the NWB core namespace) without requiring hard paths to the files describing the core namespace. Either source or namespace must be specified, but not both.
namespaces:
@@ -312,6 +318,11 @@ slots:
description: describes the kind of reference
range: reftype_options
+ # extra - not defined in nwb-schema-language but useful when working with class objects
+ parent:
+ description: The parent group that contains this dataset or group
+ range: Group
+ required: false
enums:
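The `parent` slot added here is the one `ParentizeMixin` populates at validation time; from the consumer side, nested objects can be walked back to their container (mirroring `test_mixins.py` below):

```python
# consumer-side sketch of the new parent back-reference
from nwb_schema_language import Dataset, Group

group = Group(doc="container", datasets=[Dataset(name="d", doc="")])
assert group.datasets[0].parent is group  # set by ParentizeMixin
```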
diff --git a/nwb_schema_language/src/nwb_schema_language/util.py b/nwb_schema_language/src/nwb_schema_language/util.py
new file mode 100644
index 0000000..ef910a8
--- /dev/null
+++ b/nwb_schema_language/src/nwb_schema_language/util.py
@@ -0,0 +1,49 @@
+"""
+The fabled junk drawer
+"""
+
+import re
+import textwrap
+from pprint import pformat as _pformat
+
+
+def pformat(fields: dict, cls_name: str, indent: str = " ") -> str:
+ """
+ Pretty-format the fields of a ``YAMLRoot`` object without the
+ wonky indentation of :func:`pprint.pformat`.
+
+ Formatting is similar to black:
+ items at similar levels of nesting get similar levels of indentation,
+ rather than being placed at essentially random levels of indentation
+ depending on what came before them.
+ """
+ res = []
+ total_len = 0
+ for key, val in fields.items():
+ if val == [] or val == {} or val is None:
+ continue
+ # pformat handles everything else that isn't a YAMLRoot object,
+ # but it sure does look ugly
+ # use it to split lines and as the thing of last resort,
+ # but otherwise indent = 0, we'll do that
+ val_str = _pformat(val, indent=0, compact=True, sort_dicts=False)
+ # now we indent everything except the first line by indenting
+ # and then using regex to remove just the first indent
+ val_str = re.sub(rf"\A{re.escape(indent)}", "", textwrap.indent(val_str, indent))
+ # now recombine with the key in a format that can be re-eval'd
+ # into an object if indent is just whitespace
+ val_str = f"'{key}': " + val_str
+
+ # count the total length of this string so we know if we need to linebreak or not later
+ total_len += len(val_str)
+ res.append(val_str)
+
+ if total_len > 80:
+ inside = ",\n".join(res)
+ # we indent twice - once for the inner contents of every inner object, and one to
+ # offset from the root element. that keeps us from needing to be recursive except for the
+ # single pformat call
+ inside = textwrap.indent(inside, indent)
+ return cls_name + "({\n" + inside + "\n})"
+ else:
+ return cls_name + "({" + ", ".join(res) + "})"
diff --git a/nwb_schema_language/tests/test_data.py b/nwb_schema_language/tests/test_data.py
deleted file mode 100644
index b2f7030..0000000
--- a/nwb_schema_language/tests/test_data.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""Data test."""
-
-import os
-import glob
-import unittest
-
-from linkml_runtime.loaders import yaml_loader
-from nwb_schema_language.datamodel.nwb_schema_language import Namespaces
-
-ROOT = os.path.join(os.path.dirname(__file__), "..")
-DATA_DIR = os.path.join(ROOT, "src", "data", "tests")
-
-EXAMPLE_FILES = glob.glob(os.path.join(DATA_DIR, "*.yaml"))
-
-
-class TestData(unittest.TestCase):
- """Test data and datamodel."""
-
- def test_namespaces(self):
- """Date test."""
- namespace_file = [f for f in EXAMPLE_FILES if "namespace.yaml" in f][0]
- obj = yaml_loader.load(namespace_file, target_class=Namespaces)
- assert obj
diff --git a/nwb_schema_language/tests/test_mixins.py b/nwb_schema_language/tests/test_mixins.py
new file mode 100644
index 0000000..ba98e6e
--- /dev/null
+++ b/nwb_schema_language/tests/test_mixins.py
@@ -0,0 +1,31 @@
+from nwb_schema_language import Group, Dataset, Attribute
+
+
+def test_parentize_mixin():
+ """
+ The parentize mixin should populate the "parent" attribute for applicable children.
+ """
+ dset_attr = Attribute(name="dset_attr", doc="")
+ dset = Dataset(
+ name="dataset", doc="", attributes=[dset_attr, {"name": "dict_based_attr", "doc": ""}]
+ )
+ group_attr = Attribute(name="group_attr", doc="")
+ group = Group(
+ name="group",
+ doc="",
+ attributes=[group_attr, {"name": "dict_based_attr", "doc": ""}],
+ datasets=[dset, {"name": "dict_based_dset", "doc": ""}],
+ )
+
+ assert dset_attr.parent is dset
+ assert dset.attributes[1].name == "dict_based_attr"
+ assert dset.attributes[1].parent is dset
+ assert dset.parent is group
+ assert group_attr.parent is group
+ assert group.attributes[1].name == "dict_based_attr"
+ assert group.attributes[1].parent is group
+ assert group.datasets[1].name == "dict_based_dset"
+ assert group.datasets[1].parent is group
+
+ dumped = group.model_dump()
+ assert "parent" not in dumped
diff --git a/scripts/generate_core.py b/scripts/generate_core.py
index 4aeb21a..53a7574 100644
--- a/scripts/generate_core.py
+++ b/scripts/generate_core.py
@@ -20,36 +20,6 @@ from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo
from nwb_linkml.io import schema as io
-def generate_core_yaml(output_path: Path, dry_run: bool = False, hdmf_only: bool = False):
- """Just build the latest version of the core schema"""
-
- core = io.load_nwb_core(hdmf_only=hdmf_only)
- built_schemas = core.build().schemas
- for schema in built_schemas:
- output_file = output_path / (schema.name + ".yaml")
- if not dry_run:
- yaml_dumper.dump(schema, output_file)
-
-
-def generate_core_pydantic(yaml_path: Path, output_path: Path, dry_run: bool = False):
- """Just generate the latest version of the core schema"""
- for schema in yaml_path.glob("*.yaml"):
- python_name = schema.stem.replace(".", "_").replace("-", "_")
- pydantic_file = (output_path / python_name).with_suffix(".py")
-
- generator = NWBPydanticGenerator(
- str(schema),
- pydantic_version="2",
- emit_metadata=True,
- gen_classvars=True,
- gen_slots=True,
- )
- gen_pydantic = generator.serialize()
- if not dry_run:
- with open(pydantic_file, "w") as pfile:
- pfile.write(gen_pydantic)
-
-
def make_tmp_dir(clear: bool = False) -> Path:
# use a directory underneath this one as the temporary directory rather than
# the default hidden one
@@ -67,8 +37,8 @@ def generate_versions(
pydantic_path: Path,
dry_run: bool = False,
repo: GitRepo = NWB_CORE_REPO,
- hdmf_only=False,
pdb=False,
+ latest: bool = False,
):
"""
Generate linkml models for all versions
@@ -83,8 +53,13 @@ def generate_versions(
failed_versions = {}
+ if latest:
+ versions = [repo.namespace.versions[-1]]
+ else:
+ versions = repo.namespace.versions
+
overall_progress = Progress()
- overall_task = overall_progress.add_task("All Versions", total=len(NWB_CORE_REPO.versions))
+ overall_task = overall_progress.add_task("All Versions", total=len(versions))
build_progress = Progress(
TextColumn(
@@ -101,7 +76,7 @@ def generate_versions(
linkml_task = None
pydantic_task = None
- for version in repo.namespace.versions:
+ for version in versions:
# build linkml
try:
# check out the version (this should also refresh the hdmf-common schema)
@@ -252,11 +227,10 @@ def main():
if not args.dry_run:
args.yaml.mkdir(exist_ok=True)
args.pydantic.mkdir(exist_ok=True)
- if args.latest:
- generate_core_yaml(args.yaml, args.dry_run, args.hdmf)
- generate_core_pydantic(args.yaml, args.pydantic, args.dry_run)
- else:
- generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf, pdb=args.pdb)
+
+ generate_versions(
+ args.yaml, args.pydantic, args.dry_run, repo, pdb=args.pdb, latest=args.latest
+ )
if __name__ == "__main__":