From 880352d9a4470bd67b45007d25fe2754388adda7 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 12 Sep 2024 22:40:14 -0700 Subject: [PATCH 01/18] v0.2.0 of nwb_schema_language - parentization --- .github/workflows/tests.yml | 4 + nwb_linkml/pyproject.toml | 2 +- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 4 + nwb_linkml/src/nwb_linkml/adapters/group.py | 20 +- .../src/nwb_linkml/adapters/namespaces.py | 65 +- .../test_adapters/test_adapter_namespaces.py | 80 ++- nwb_schema_language/Makefile | 4 +- nwb_schema_language/pyproject.toml | 3 +- .../src/nwb_schema_language/__init__.py | 4 +- .../datamodel/nwb_schema_pydantic.py | 675 ++++++++++++++++-- .../src/nwb_schema_language/generator.py | 52 ++ .../src/nwb_schema_language/patches.py | 25 +- .../schema/nwb_schema_language.yaml | 15 +- nwb_schema_language/tests/test_data.py | 23 - nwb_schema_language/tests/test_mixins.py | 31 + 15 files changed, 893 insertions(+), 114 deletions(-) create mode 100644 nwb_schema_language/src/nwb_schema_language/generator.py delete mode 100644 nwb_schema_language/tests/test_data.py create mode 100644 nwb_schema_language/tests/test_mixins.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b1283e1..e89654a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -46,6 +46,10 @@ jobs: run: pytest working-directory: nwb_linkml + - name: Run nwb_schema_language Tests + run: pytest + working-directory: nwb_schema_language + - name: Coveralls Parallel uses: coverallsapp/github-action@v2.3.0 if: runner.os != 'macOS' diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index c8ccd36..edf3579 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -12,7 +12,7 @@ dependencies = [ "nwb-models>=0.2.0", "pyyaml>=6.0", "linkml-runtime>=1.7.7", - "nwb-schema-language>=0.1.3", + "nwb-schema-language>=0.2.0", "rich>=13.5.2", #"linkml>=1.7.10", "linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml", 
diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index acbc896..f7b4f2f 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -170,6 +170,10 @@ class Adapter(BaseModel): # so skip to avoid combinatoric walking if key == "imports" and type(input).__name__ == "SchemaAdapter": continue + # nwb_schema_language objects have a reference to their parent, + # which causes cycles + if key == "parent": + continue val = getattr(input, key) yield (key, val) if isinstance(val, (BaseModel, dict, list)): diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py index 0703aa0..f0e44ea 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/group.py +++ b/nwb_linkml/src/nwb_linkml/adapters/group.py @@ -29,7 +29,7 @@ class GroupAdapter(ClassAdapter): """ # Handle container groups with only * quantity unnamed groups if ( - len(self.cls.groups) > 0 + self.cls.groups and not self.cls.links and all([self._check_if_container(g) for g in self.cls.groups]) ): # and \ @@ -38,8 +38,8 @@ class GroupAdapter(ClassAdapter): # handle if we are a terminal container group without making a new class if ( - len(self.cls.groups) == 0 - and len(self.cls.datasets) == 0 + not self.cls.groups + and not self.cls.datasets and self.cls.neurodata_type_inc is not None and self.parent is not None ): @@ -177,15 +177,17 @@ class GroupAdapter(ClassAdapter): # Datasets are simple, they are terminal classes, and all logic # for creating slots vs. 
classes is handled by the adapter class dataset_res = BuildResult() - for dset in self.cls.datasets: - dset_adapter = DatasetAdapter(cls=dset, parent=self) - dataset_res += dset_adapter.build() + if self.cls.datasets: + for dset in self.cls.datasets: + dset_adapter = DatasetAdapter(cls=dset, parent=self) + dataset_res += dset_adapter.build() group_res = BuildResult() - for group in self.cls.groups: - group_adapter = GroupAdapter(cls=group, parent=self) - group_res += group_adapter.build() + if self.cls.groups: + for group in self.cls.groups: + group_adapter = GroupAdapter(cls=group, parent=self) + group_res += group_adapter.build() res = dataset_res + group_res diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index c6abd70..96d653e 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -9,11 +9,12 @@ import contextlib from copy import copy from pathlib import Path from pprint import pformat -from typing import Dict, List, Optional +from typing import Dict, Generator, List, Optional from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.linkml_model import Annotation, SchemaDefinition from pydantic import Field, model_validator +import networkx as nx from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.schema import SchemaAdapter @@ -31,6 +32,9 @@ class NamespacesAdapter(Adapter): schemas: List[SchemaAdapter] imported: List["NamespacesAdapter"] = Field(default_factory=list) + _completed: bool = False + """whether we have run the :meth:`.complete_namespace` method""" + @classmethod def from_yaml(cls, path: Path) -> "NamespacesAdapter": """ @@ -65,7 +69,7 @@ class NamespacesAdapter(Adapter): needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns) ns_adapter.imported.append(needed_adapter) - ns_adapter.populate_imports() + ns_adapter.complete_namespaces() return ns_adapter @@ -76,6 +80,9 @@ 
class NamespacesAdapter(Adapter): Build the NWB namespace to the LinkML Schema """ + if not self._completed: + self.complete_namespaces() + sch_result = BuildResult() for sch in self.schemas: if progress is not None: @@ -149,6 +156,50 @@ class NamespacesAdapter(Adapter): break return self + def complete_namespaces(self): + """ + After loading the namespace, and after any imports have been added afterwards, + this must be called to complete the definitions of the contained schema objects. + + This is not automatic because NWB doesn't have a formal dependency resolution system, + so it is often impossible to know which imports are needed until after the namespace + adapter has been instantiated. + + It **is** automatically called if it hasn't been already by the :meth:`.build` method. + """ + self.populate_imports() + self._roll_down_inheritance() + + for i in self.imported: + i.complete_namespaces() + + self._completed = True + + def _roll_down_inheritance(self): + """ + nwb-schema-language inheritance doesn't work like normal python inheritance - + instead of inheriting everything at the 'top level' of a class, it also + recursively merges all properties from the parent objects. + + References: + https://github.com/NeurodataWithoutBorders/pynwb/issues/1954 + """ + pass + + def inheritance_graph(self) -> nx.DiGraph: + """ + Make a graph of all ``neurodata_types`` in the namespace and imports such that + each node contains the group or dataset it describes, + and has directed edges pointing at all the classes that inherit from it. + + In the case that the inheriting class does not itself have a ``neurodata_type_def``, + it is + """ + g = nx.DiGraph() + for sch in self.all_schemas(): + for cls in sch.created_classes: + pass + def find_type_source(self, name: str) -> SchemaAdapter: """ Given some neurodata_type_inc, find the schema that it's defined in. 
@@ -279,3 +330,13 @@ class NamespacesAdapter(Adapter): if name in sources: return ns.name return None + + def all_schemas(self) -> Generator[SchemaAdapter, None, None]: + """ + Iterator over all schemas including imports + """ + for sch in self.schemas: + yield sch + for imported in self.imported: + for sch in imported: + yield sch diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py index bbcb739..768669b 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py @@ -1,6 +1,7 @@ import pytest - -from nwb_linkml.adapters import SchemaAdapter +from pathlib import Path +from nwb_linkml.adapters import NamespacesAdapter, SchemaAdapter +from nwb_schema_language import Attribute, Group, Namespace, Dataset, Namespaces, Schema, FlatDtype @pytest.mark.parametrize( @@ -48,8 +49,7 @@ def test_skip_imports(nwb_core_fixture): assert all([ns == "core" for ns in namespaces]) -@pytest.mark.skip() -def test_populate_inheritance(nwb_core_fixture): +def test_roll_down_inheritance(): """ Classes should receive and override the properties of their parents when they have neurodata_type_inc @@ -59,4 +59,74 @@ def test_populate_inheritance(nwb_core_fixture): Returns: """ - pass + parent_cls = Group( + neurodata_type_def="Parent", + doc="parent", + attributes=[ + Attribute(name="a", dims=["a", "b"], shape=[1, 2], doc="a", value="a"), + Attribute(name="b", dims=["c", "d"], shape=[3, 4], doc="b", value="b"), + ], + datasets=[ + Dataset( + name="data", + dims=["a", "b"], + shape=[1, 2], + doc="data", + attributes=[ + Attribute(name="c", dtype=FlatDtype.int32, doc="c"), + Attribute(name="d", dtype=FlatDtype.int32, doc="d"), + ], + ) + ], + ) + parent_sch = Schema(source="parent.yaml") + parent_ns = Namespaces( + namespaces=[ + Namespace( + author="hey", + contact="sup", + name="parent", + doc="a parent", + version="1", + schema=[parent_sch], 
+ ) + ] + ) + + child_cls = Group( + neurodata_type_def="Child", + neurodata_type_inc="Parent", + doc="child", + attributes=[Attribute(name="a", doc="a")], + datasets=[ + Dataset( + name="data", + doc="data again", + attributes=[Attribute(name="a", doc="c", value="z"), Attribute(name="c", doc="c")], + ) + ], + ) + child_sch = Schema(source="child.yaml") + child_ns = Namespaces( + namespaces=[ + Namespace( + author="hey", + contact="sup", + name="child", + doc="a child", + version="1", + schema=[child_sch, Schema(namespace="parent")], + ) + ] + ) + + parent_schema_adapter = SchemaAdapter(path=Path("parent.yaml"), groups=[parent_cls]) + parent_ns_adapter = NamespacesAdapter(namespaces=parent_ns, schemas=[parent_schema_adapter]) + child_schema_adapter = SchemaAdapter(path=Path("child.yaml"), groups=[child_cls]) + child_ns_adapter = NamespacesAdapter( + namespaces=child_ns, schemas=[child_schema_adapter], imported=[parent_ns_adapter] + ) + + child_ns_adapter.complete_namespaces() + + child = child_ns_adapter.get("Child") diff --git a/nwb_schema_language/Makefile b/nwb_schema_language/Makefile index 9d6f45f..2f8cd76 100644 --- a/nwb_schema_language/Makefile +++ b/nwb_schema_language/Makefile @@ -6,7 +6,7 @@ SHELL := bash .SUFFIXES: .SECONDARY: -RUN = poetry run +RUN = pdm run # get values from about.yaml file SCHEMA_NAME = $(shell ${SHELL} ./utils/get-value.sh name) SOURCE_SCHEMA_PATH = $(shell ${SHELL} ./utils/get-value.sh source_schema_path) @@ -107,7 +107,7 @@ gen-project: $(PYMODEL) $(RUN) gen-project ${GEN_PARGS} -d $(DEST) $(SOURCE_SCHEMA_PATH) && mv $(DEST)/*.py $(PYMODEL) gen-pydantic: $(PYMODEL) - $(RUN) gen-pydantic $(SOURCE_SCHEMA_PATH) --pydantic_version 2 > $(PYMODEL)/nwb_schema_pydantic.py + $(RUN) generate_pydantic $(RUN) run_patches --phase post_generation_pydantic test: test-schema test-python test-examples diff --git a/nwb_schema_language/pyproject.toml b/nwb_schema_language/pyproject.toml index 1a59159..b912c77 100644 --- 
a/nwb_schema_language/pyproject.toml +++ b/nwb_schema_language/pyproject.toml @@ -9,7 +9,7 @@ dependencies = [ "linkml-runtime>=1.7.7", "pydantic>=2.3.0", ] -version = "0.1.3" +version = "0.2.0" description = "Translation of the nwb-schema-language to LinkML" readme = "README.md" @@ -20,6 +20,7 @@ documentation = "https://nwb-linkml.readthedocs.io" [project.scripts] run_patches = "nwb_schema_language.patches:main" +generate_pydantic = "nwb_schema_language.generator:generate" [tool.pdm] [tool.pdm.dev-dependencies] diff --git a/nwb_schema_language/src/nwb_schema_language/__init__.py b/nwb_schema_language/src/nwb_schema_language/__init__.py index 653b6ff..d211475 100644 --- a/nwb_schema_language/src/nwb_schema_language/__init__.py +++ b/nwb_schema_language/src/nwb_schema_language/__init__.py @@ -22,10 +22,10 @@ try: DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype] -except (NameError, RecursionError): +except (NameError, RecursionError) as e: warnings.warn( "Error importing pydantic classes, passing because we might be in the process of patching" - " them, but it is likely they are broken and you will be unable to use them!", + f" them, but it is likely they are broken and you will be unable to use them!\n{e}", stacklevel=1, ) diff --git a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py index 84132d0..d1bbac3 100644 --- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py +++ b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py @@ -1,14 +1,13 @@ from __future__ import annotations -from datetime import datetime, date -from enum import Enum -from typing import List, Dict, Optional, Any, Union -from pydantic import BaseModel as BaseModel, Field -import sys -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal +import re +import sys +from 
datetime import date, datetime, time +from decimal import Decimal +from enum import Enum +from typing import Any, ClassVar, Dict, List, Literal, Optional, Union + +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -16,11 +15,81 @@ version = "None" class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=False, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) pass -class ReftypeOptions(str, Enum): +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +class ParentizeMixin(BaseModel): + + @model_validator(mode="after") + def parentize(self): + """Set the parent attribute for all our fields they have one""" + for field_name in self.model_fields: + if field_name == "parent": + continue + field = getattr(self, field_name) + if not isinstance(field, list): + field = [field] + for item in field: + if hasattr(item, "parent"): + item.parent = self + + return self + + +linkml_meta = LinkMLMeta( + { + "default_prefix": "nwb_schema_language", + "default_range": "string", + "description": "Translation of the nwb-schema-language to LinkML", + "id": "https://w3id.org/p2p_ld/nwb-schema-language", + "imports": ["linkml:types"], + "license": "GNU GPL v3.0", + "name": "nwb-schema-language", + "prefixes": { + "linkml": {"prefix_prefix": "linkml", "prefix_reference": "https://w3id.org/linkml/"}, + "nwb_schema_language": { + "prefix_prefix": "nwb_schema_language", + "prefix_reference": "https://w3id.org/p2p_ld/nwb-schema-language/", + }, + "schema": {"prefix_prefix": "schema", "prefix_reference": 
"http://schema.org/"}, + }, + "see_also": ["https://p2p_ld.github.io/nwb-schema-language"], + "settings": { + "email": {"setting_key": "email", "setting_value": "\\S+@\\S+{\\.\\w}+"}, + "protected_string": {"setting_key": "protected_string", "setting_value": "^[A-Za-z_][A-Za-z0-9_]*$"}, + }, + "source_file": "/Users/jonny/git/p2p-ld/nwb-linkml/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml", + "title": "nwb-schema-language", + } +) + + +class ReftypeOptions(str, Enum): # Reference to another group or dataset of the given target_type ref = "ref" # Reference to another group or dataset of the given target_type @@ -32,7 +101,6 @@ class ReftypeOptions(str, Enum): class QuantityEnum(str, Enum): - # Zero or more instances, equivalent to zero_or_many ASTERISK = "*" # Zero or one instances, equivalent to zero_or_one @@ -48,7 +116,6 @@ class QuantityEnum(str, Enum): class FlatDtype(str, Enum): - # single precision floating point (32 bit) float = "float" # single precision floating point (32 bit) @@ -100,164 +167,642 @@ class FlatDtype(str, Enum): class Namespace(ConfiguredBaseModel): - - doc: str = Field(..., description="""Description of corresponding object.""") - name: str = Field(...) - full_name: Optional[str] = Field( - None, description="""Optional string with extended full name for the namespace.""" + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", + "slot_usage": {"name": {"name": "name", "required": True}}, + } ) - version: str = Field(...) 
+ + doc: str = Field( + ..., + description="""Description of corresponding object.""", + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, + ) + name: str = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + full_name: Optional[str] = Field( + None, + description="""Optional string with extended full name for the namespace.""", + json_schema_extra={"linkml_meta": {"alias": "full_name", "domain_of": ["Namespace"]}}, + ) + version: str = Field(..., json_schema_extra={"linkml_meta": {"alias": "version", "domain_of": ["Namespace"]}}) date: Optional[datetime] = Field( - None, description="""Date that a namespace was last modified or released""" + None, + description="""Date that a namespace was last modified or released""", + json_schema_extra={ + "linkml_meta": { + "alias": "date", + "domain_of": ["Namespace"], + "examples": [{"value": "2017-04-25 17:14:13"}], + "slot_uri": "schema:dateModified", + } + }, ) author: List[str] | str = Field( - default_factory=list, + ..., description="""List of strings with the names of the authors of the namespace.""", + json_schema_extra={"linkml_meta": {"alias": "author", "domain_of": ["Namespace"], "slot_uri": "schema:author"}}, ) contact: List[str] | str = Field( - default_factory=list, + ..., description="""List of strings with the contact information for the authors. 
Ordering of the contacts should match the ordering of the authors.""", + json_schema_extra={ + "linkml_meta": { + "alias": "contact", + "domain_of": ["Namespace"], + "slot_uri": "schema:email", + "structured_pattern": {"interpolated": True, "syntax": "{email}"}, + } + }, ) schema_: Optional[List[Schema]] = Field( + None, alias="schema", - default_factory=list, description="""List of the schema to be included in this namespace.""", + json_schema_extra={"linkml_meta": {"alias": "schema_", "domain_of": ["Namespace"]}}, ) class Namespaces(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) - namespaces: Optional[List[Namespace]] = Field(default_factory=list) + namespaces: Optional[List[Namespace]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "namespaces", "domain_of": ["Namespaces"]}} + ) class Schema(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", + "rules": [ + { + "description": "If namespace is absent, source is required", + "postconditions": {"slot_conditions": {"source": {"name": "source", "required": True}}}, + "preconditions": { + "slot_conditions": {"namespace": {"name": "namespace", "value_presence": "ABSENT"}} + }, + }, + { + "description": "If source is absent, namespace is required.", + "postconditions": {"slot_conditions": {"namespace": {"name": "namespace", "required": True}}}, + "preconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "ABSENT"}}}, + }, + { + "description": "If namespace is present, source is cannot be", + "postconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "ABSENT"}}}, + "preconditions": { + "slot_conditions": {"namespace": {"name": "namespace", "value_presence": "PRESENT"}} + }, + }, + { + "description": "If source is present, namespace cannot be.", + "postconditions": { + 
"slot_conditions": {"namespace": {"name": "namespace", "value_presence": "ABSENT"}} + }, + "preconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "PRESENT"}}}, + }, + ], + } + ) source: Optional[str] = Field( None, description="""describes the name of the YAML (or JSON) file with the schema specification. The schema files should be located in the same folder as the namespace file.""", + json_schema_extra={"linkml_meta": {"alias": "source", "domain_of": ["Schema"]}}, ) namespace: Optional[str] = Field( None, description="""describes a named reference to another namespace. In contrast to source, this is a reference by name to a known namespace (i.e., the namespace is resolved during the build and must point to an already existing namespace). This mechanism is used to allow, e.g., extension of a core namespace (here the NWB core namespace) without requiring hard paths to the files describing the core namespace. Either source or namespace must be specified, but not both.""", + json_schema_extra={"linkml_meta": {"alias": "namespace", "domain_of": ["Schema"]}}, ) title: Optional[str] = Field( - None, description="""a descriptive title for a file for documentation purposes.""" + None, + description="""a descriptive title for a file for documentation purposes.""", + json_schema_extra={"linkml_meta": {"alias": "title", "domain_of": ["Schema"]}}, ) neurodata_types: Optional[List[Union[Dataset, Group]]] = Field( - default_factory=list, + None, description="""an optional list of strings indicating which data types should be included from the given specification source or namespace. 
The default is null indicating that all data types should be included.""", + json_schema_extra={ + "linkml_meta": { + "alias": "neurodata_types", + "any_of": [{"range": "Dataset"}, {"range": "Group"}], + "domain_of": ["Schema"], + } + }, + ) + doc: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, ) - doc: Optional[str] = Field(None) -class Group(ConfiguredBaseModel): +class Group(ConfiguredBaseModel, ParentizeMixin): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) neurodata_type_def: Optional[str] = Field( None, description="""Used alongside neurodata_type_inc to indicate inheritance, naming, and mixins""", + json_schema_extra={ + "linkml_meta": { + "alias": "neurodata_type_def", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, ) neurodata_type_inc: Optional[str] = Field( None, description="""Used alongside neurodata_type_def to indicate inheritance, naming, and mixins""", + json_schema_extra={ + "linkml_meta": { + "alias": "neurodata_type_inc", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + name: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + default_name: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "default_name", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + doc: str = Field( + ..., + description="""Description of corresponding object.""", + 
json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, + ) + quantity: Optional[Union[QuantityEnum, int]] = Field( + "1", + json_schema_extra={ + "linkml_meta": { + "alias": "quantity", + "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], + "domain_of": ["Group", "Link", "Dataset"], + "ifabsent": "int(1)", + "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + } + }, + ) + linkable: Optional[bool] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}} + ) + attributes: Optional[List[Attribute]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}} + ) + datasets: Optional[List[Dataset]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}} + ) + groups: Optional[List[Group]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}} + ) + links: Optional[List[Link]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "links", "domain_of": ["Group"]}} + ) + parent: Optional[Group] = Field( + None, + exclude=True, + description="""The parent group that contains this dataset or group""", + json_schema_extra={"linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}}, ) - name: Optional[str] = Field(None) - default_name: Optional[str] = Field(None) - doc: str = Field(..., description="""Description of corresponding object.""") - quantity: Optional[Union[QuantityEnum, int]] = Field(1) - linkable: Optional[bool] = Field(None) - attributes: Optional[List[Attribute]] = Field(default_factory=list) - datasets: Optional[List[Dataset]] = Field(default_factory=list) - groups: Optional[List[Group]] = Field(default_factory=list) - links: 
Optional[List[Link]] = Field(default_factory=list) class Groups(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) - groups: Optional[List[Group]] = Field(default_factory=list) + groups: Optional[List[Group]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}} + ) class Link(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) - name: Optional[str] = Field(None) - doc: str = Field(..., description="""Description of corresponding object.""") + name: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + doc: str = Field( + ..., + description="""Description of corresponding object.""", + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, + ) target_type: str = Field( ..., description="""Describes the neurodata_type of the target that the reference points to""", + json_schema_extra={"linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]}}, + ) + quantity: Optional[Union[QuantityEnum, int]] = Field( + "1", + json_schema_extra={ + "linkml_meta": { + "alias": "quantity", + "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], + "domain_of": ["Group", "Link", "Dataset"], + "ifabsent": "int(1)", + "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + } + }, ) - quantity: Optional[Union[QuantityEnum, int]] = Field(1) class Datasets(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": 
"https://w3id.org/p2p_ld/nwb-schema-language"}) - datasets: Optional[List[Dataset]] = Field(default_factory=list) + datasets: Optional[List[Dataset]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}} + ) class ReferenceDtype(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) target_type: str = Field( ..., description="""Describes the neurodata_type of the target that the reference points to""", + json_schema_extra={"linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]}}, ) reftype: Optional[ReftypeOptions] = Field( - None, description="""describes the kind of reference""" + None, + description="""describes the kind of reference""", + json_schema_extra={"linkml_meta": {"alias": "reftype", "domain_of": ["ReferenceDtype"]}}, ) class CompoundDtype(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", + "slot_usage": { + "dtype": { + "any_of": [{"range": "ReferenceDtype"}, {"range": "FlatDtype"}], + "multivalued": False, + "name": "dtype", + "required": True, + }, + "name": {"name": "name", "required": True}, + }, + } + ) - name: str = Field(...) - doc: str = Field(..., description="""Description of corresponding object.""") - dtype: Union[FlatDtype, ReferenceDtype] = Field(...) 
+ name: str = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + doc: str = Field( + ..., + description="""Description of corresponding object.""", + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, + ) + dtype: Union[FlatDtype, ReferenceDtype] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "alias": "dtype", + "any_of": [{"range": "ReferenceDtype"}, {"range": "FlatDtype"}], + "domain_of": ["CompoundDtype", "DtypeMixin"], + } + }, + ) class DtypeMixin(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", + "mixin": True, + "rules": [ + { + "postconditions": {"slot_conditions": {"dtype": {"multivalued": False, "name": "dtype"}}}, + "preconditions": {"slot_conditions": {"dtype": {"name": "dtype", "range": "FlatDtype"}}}, + } + ], + } + ) dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( - default_factory=list + None, + json_schema_extra={ + "linkml_meta": { + "alias": "dtype", + "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "domain_of": ["CompoundDtype", "DtypeMixin"], + } + }, ) class Attribute(DtypeMixin): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", + "mixins": ["DtypeMixin"], + "slot_usage": { + "name": {"name": "name", "required": True}, + "parent": {"any_of": [{"range": "Group"}, {"range": "Dataset"}], "name": "parent"}, + }, + } + ) - name: str = Field(...) 
- dims: Optional[List[Union[Any, str]]] = Field(None) - shape: Optional[List[Union[Any, int, str]]] = Field(None) + name: str = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + dims: Optional[List[Union[Any, str]]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "dims", + "any_of": [{"range": "string"}, {"range": "AnyType"}], + "domain_of": ["Attribute", "Dataset"], + "todos": [ + "Can't quite figure out how to allow an array of arrays - see " + "https://github.com/linkml/linkml/issues/895" + ], + } + }, + ) + shape: Optional[List[Union[Any, int, str]]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "shape", + "any_of": [ + {"minimum_value": 1, "range": "integer"}, + {"equals_string": "null", "range": "string"}, + {"range": "AnyType"}, + ], + "domain_of": ["Attribute", "Dataset"], + "todos": [ + "Can't quite figure out how to allow an array of arrays - see " + "https://github.com/linkml/linkml/issues/895" + ], + } + }, + ) value: Optional[Any] = Field( - None, description="""Optional constant, fixed value for the attribute.""" + None, + description="""Optional constant, fixed value for the attribute.""", + json_schema_extra={"linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]}}, ) default_value: Optional[Any] = Field( - None, description="""Optional default value for variable-valued attributes.""" + None, + description="""Optional default value for variable-valued attributes.""", + json_schema_extra={"linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}}, + ) + doc: str = Field( + ..., + description="""Description of corresponding object.""", + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", 
"Dataset", "CompoundDtype"], + } + }, ) - doc: str = Field(..., description="""Description of corresponding object.""") required: Optional[bool] = Field( True, description="""Optional boolean key describing whether the attribute is required. Default value is True.""", + json_schema_extra={"linkml_meta": {"alias": "required", "domain_of": ["Attribute"], "ifabsent": "true"}}, + ) + parent: Optional[Union[Dataset, Group]] = Field( + None, + exclude=True, + description="""The parent group that contains this dataset or group""", + json_schema_extra={ + "linkml_meta": { + "alias": "parent", + "any_of": [{"range": "Group"}, {"range": "Dataset"}], + "domain_of": ["Group", "Attribute", "Dataset"], + } + }, + ) + dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "dtype", + "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "domain_of": ["CompoundDtype", "DtypeMixin"], + } + }, ) - dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None) -class Dataset(DtypeMixin): +class Dataset(ConfiguredBaseModel, ParentizeMixin): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language", "mixins": ["DtypeMixin"]} + ) neurodata_type_def: Optional[str] = Field( None, description="""Used alongside neurodata_type_inc to indicate inheritance, naming, and mixins""", + json_schema_extra={ + "linkml_meta": { + "alias": "neurodata_type_def", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, ) neurodata_type_inc: Optional[str] = Field( None, description="""Used alongside neurodata_type_def to indicate inheritance, naming, and mixins""", + json_schema_extra={ + "linkml_meta": { + "alias": "neurodata_type_inc", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": 
"{protected_string}"}, + } + }, + ) + name: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "name", + "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + default_name: Optional[str] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "default_name", + "domain_of": ["Group", "Dataset"], + "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, + } + }, + ) + dims: Optional[List[Union[Any, str]]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "dims", + "any_of": [{"range": "string"}, {"range": "AnyType"}], + "domain_of": ["Attribute", "Dataset"], + "todos": [ + "Can't quite figure out how to allow an array of arrays - see " + "https://github.com/linkml/linkml/issues/895" + ], + } + }, + ) + shape: Optional[List[Union[Any, int, str]]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "shape", + "any_of": [ + {"minimum_value": 1, "range": "integer"}, + {"equals_string": "null", "range": "string"}, + {"range": "AnyType"}, + ], + "domain_of": ["Attribute", "Dataset"], + "todos": [ + "Can't quite figure out how to allow an array of arrays - see " + "https://github.com/linkml/linkml/issues/895" + ], + } + }, ) - name: Optional[str] = Field(None) - default_name: Optional[str] = Field(None) - dims: Optional[List[Union[Any, str]]] = Field(None) - shape: Optional[List[Union[Any, int, str]]] = Field(None) value: Optional[Any] = Field( - None, description="""Optional constant, fixed value for the attribute.""" + None, + description="""Optional constant, fixed value for the attribute.""", + json_schema_extra={"linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]}}, ) default_value: Optional[Any] = Field( - None, description="""Optional default value for variable-valued attributes.""" + None, + description="""Optional 
default value for variable-valued attributes.""", + json_schema_extra={"linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}}, ) - doc: str = Field(..., description="""Description of corresponding object.""") - quantity: Optional[Union[QuantityEnum, int]] = Field(1) - linkable: Optional[bool] = Field(None) - attributes: Optional[List[Attribute]] = Field(None) - dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None) + doc: str = Field( + ..., + description="""Description of corresponding object.""", + json_schema_extra={ + "linkml_meta": { + "alias": "doc", + "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + } + }, + ) + quantity: Optional[Union[QuantityEnum, int]] = Field( + "1", + json_schema_extra={ + "linkml_meta": { + "alias": "quantity", + "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], + "domain_of": ["Group", "Link", "Dataset"], + "ifabsent": "int(1)", + "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + } + }, + ) + linkable: Optional[bool] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}} + ) + attributes: Optional[List[Attribute]] = Field( + None, json_schema_extra={"linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}} + ) + parent: Optional[Group] = Field( + None, + exclude=True, + description="""The parent group that contains this dataset or group""", + json_schema_extra={"linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}}, + ) + dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "alias": "dtype", + "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "domain_of": ["CompoundDtype", "DtypeMixin"], + } + }, + ) + + +# Model rebuild +# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Namespace.model_rebuild() +Namespaces.model_rebuild() +Schema.model_rebuild() +Group.model_rebuild() +Groups.model_rebuild() +Link.model_rebuild() +Datasets.model_rebuild() +ReferenceDtype.model_rebuild() +CompoundDtype.model_rebuild() +DtypeMixin.model_rebuild() +Attribute.model_rebuild() +Dataset.model_rebuild() diff --git a/nwb_schema_language/src/nwb_schema_language/generator.py b/nwb_schema_language/src/nwb_schema_language/generator.py new file mode 100644 index 0000000..eefad6b --- /dev/null +++ b/nwb_schema_language/src/nwb_schema_language/generator.py @@ -0,0 +1,52 @@ +from pathlib import Path +from dataclasses import dataclass + +from linkml.generators.pydanticgen import PydanticGenerator +from linkml.generators.pydanticgen.build import ClassResult +from linkml.generators.pydanticgen.template import Import, ObjectImport +from linkml_runtime import SchemaView +from pydantic import BaseModel, model_validator + + +class ParentizeMixin(BaseModel): + + @model_validator(mode="after") + def parentize(self): + """Set the parent attribute for all our fields they have one""" + for field_name in self.model_fields: + if field_name == "parent": + continue + field = getattr(self, field_name) + if not isinstance(field, list): + field = [field] + for item in field: + if hasattr(item, "parent"): + item.parent = self + + return self + + +@dataclass +class NWBSchemaLangGenerator(PydanticGenerator): + + def __init__(self, *args, **kwargs): + kwargs["injected_classes"] = [ParentizeMixin] + kwargs["imports"] = [ + Import(module="pydantic", objects=[ObjectImport(name="model_validator")]) + ] + kwargs["black"] = True + super().__init__(*args, **kwargs) + + def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult: + if cls.cls.name in ("Dataset", "Group"): + cls.cls.bases = ["ConfiguredBaseModel", "ParentizeMixin"] + return cls + + +def generate(): + schema = Path(__file__).parent / 
"schema" / "nwb_schema_language.yaml" + output = Path(__file__).parent / "datamodel" / "nwb_schema_pydantic.py" + generator = NWBSchemaLangGenerator(schema=schema) + generated = generator.serialize() + with open(output, "w") as ofile: + ofile.write(generated) diff --git a/nwb_schema_language/src/nwb_schema_language/patches.py b/nwb_schema_language/src/nwb_schema_language/patches.py index 1b2c9a5..f6fa5b1 100644 --- a/nwb_schema_language/src/nwb_schema_language/patches.py +++ b/nwb_schema_language/src/nwb_schema_language/patches.py @@ -49,8 +49,15 @@ class Patch: patch_schema_slot = Patch( phase=Phases.post_generation_pydantic, path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"), - match=r"\n\s*(schema:)(.*Field\()(.*)", - replacement=r'\n schema_:\2alias="schema", \3', + match=r"\n\s*(schema:)(.*Field\(\n\s*None,\n)(.*)", + replacement=r'\n schema_:\2 alias="schema",\n\3', +) + +patch_schema_slot_no_newline = Patch( + phase=Phases.post_generation_pydantic, + path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"), + match=r"\n\s*(schema:)(.*Field\(None,)(.*)", + replacement=r'\n schema_:\2 alias="schema", \3', ) patch_dtype_single_multiple = Patch( @@ -74,6 +81,20 @@ patch_contact_single_multiple = Patch( replacement="contact: List[str] | str", ) +patch_validate_assignment = Patch( + phase=Phases.post_generation_pydantic, + path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"), + match=r"validate_assignment=True", + replacement="validate_assignment=False", +) + +patch_exclude_parent = Patch( + phase=Phases.post_generation_pydantic, + path=Path("src/nwb_schema_language/datamodel/nwb_schema_pydantic.py"), + match=r"(parent:.*Field\(\n\s*None,\n)(.*)", + replacement=r"\1 exclude=True,\n\2", +) + def run_patches(phase: Phases, verbose: bool = False) -> None: """ diff --git a/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml 
b/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml index ff06a56..00c9aa9 100644 --- a/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml +++ b/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml @@ -78,6 +78,7 @@ classes: - datasets - groups - links + - parent Groups: slots: @@ -94,9 +95,14 @@ classes: - default_value - doc - required + - parent slot_usage: name: required: true + parent: + any_of: + - range: Group + - range: Dataset Link: slots: @@ -121,6 +127,7 @@ classes: - quantity - linkable - attributes + - parent Datasets: slots: @@ -177,7 +184,7 @@ slots: description: Optional string with extended full name for the namespace. version: required: true - pattern: "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" +# pattern: "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" date: range: datetime slot_uri: schema:dateModified @@ -207,7 +214,6 @@ slots: # schema source: description: describes the name of the YAML (or JSON) file with the schema specification. The schema files should be located in the same folder as the namespace file. - pattern: ".*\\.(yml|yaml|json)" namespace: description: describes a named reference to another namespace. In contrast to source, this is a reference by name to a known namespace (i.e., the namespace is resolved during the build and must point to an already existing namespace). This mechanism is used to allow, e.g., extension of a core namespace (here the NWB core namespace) without requiring hard paths to the files describing the core namespace. Either source or namespace must be specified, but not both. 
namespaces: @@ -312,6 +318,11 @@ slots: description: describes the kind of reference range: reftype_options + # extra - not defined in nwb-schema-language but useful when working with class objects + parent: + description: The parent group that contains this dataset or group + range: Group + required: false enums: diff --git a/nwb_schema_language/tests/test_data.py b/nwb_schema_language/tests/test_data.py deleted file mode 100644 index b2f7030..0000000 --- a/nwb_schema_language/tests/test_data.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Data test.""" - -import os -import glob -import unittest - -from linkml_runtime.loaders import yaml_loader -from nwb_schema_language.datamodel.nwb_schema_language import Namespaces - -ROOT = os.path.join(os.path.dirname(__file__), "..") -DATA_DIR = os.path.join(ROOT, "src", "data", "tests") - -EXAMPLE_FILES = glob.glob(os.path.join(DATA_DIR, "*.yaml")) - - -class TestData(unittest.TestCase): - """Test data and datamodel.""" - - def test_namespaces(self): - """Date test.""" - namespace_file = [f for f in EXAMPLE_FILES if "namespace.yaml" in f][0] - obj = yaml_loader.load(namespace_file, target_class=Namespaces) - assert obj diff --git a/nwb_schema_language/tests/test_mixins.py b/nwb_schema_language/tests/test_mixins.py new file mode 100644 index 0000000..ba98e6e --- /dev/null +++ b/nwb_schema_language/tests/test_mixins.py @@ -0,0 +1,31 @@ +from nwb_schema_language import Group, Dataset, Attribute + + +def test_parentize_mixin(): + """ + the parentize mixin should populate the "parent" attribute for applicable children + """ + dset_attr = Attribute(name="dset_attr", doc="") + dset = Dataset( + name="dataset", doc="", attributes=[dset_attr, {"name": "dict_based_attr", "doc": ""}] + ) + group_attr = Attribute(name="group_attr", doc="") + group = Group( + name="group", + doc="", + attributes=[group_attr, {"name": "dict_based_attr", "doc": ""}], + datasets=[dset, {"name": "dict_based_dset", "doc": ""}], + ) + + assert dset_attr.parent is dset + 
assert dset.attributes[1].name == "dict_based_attr" + assert dset.attributes[1].parent is dset + assert dset.parent is group + assert group_attr.parent is group + assert group.attributes[1].name == "dict_based_attr" + assert group.attributes[1].parent is group + assert group.datasets[1].name == "dict_based_dset" + assert group.datasets[1].parent is group + + dumped = group.model_dump() + assert "parent" not in dumped From 749703e0779790c072500bfba8c65a742be2b0d4 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Fri, 13 Sep 2024 02:42:01 -0700 Subject: [PATCH 02/18] partially functioning rolldown, but getting some wonky results - missing attributes in nested inheritance, and the models are now extremely noisy, creating the same fields over and over even when they aren't overridden or modified by the child class. Need to redo the rolldown, make it less generic, don't dump to dicts, merge in a more targeted way. --- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 8 +- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 37 +- .../src/nwb_linkml/adapters/namespaces.py | 78 +++-- nwb_linkml/src/nwb_linkml/io/schema.py | 2 +- nwb_linkml/src/nwb_linkml/plot.py | 2 +- nwb_linkml/src/nwb_linkml/providers/linkml.py | 2 +- nwb_linkml/src/nwb_linkml/util.py | 73 ++++ .../test_adapters/test_adapter_namespaces.py | 42 ++- .../models/pydantic/hdmf_common/__init__.py | 1 + .../datamodel/nwb_schema_pydantic.py | 315 ++++++++++++++---- .../src/nwb_schema_language/generator.py | 20 +- 11 files changed, 489 insertions(+), 91 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/util.py diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index f7b4f2f..cb16165 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -17,7 +17,7 @@ from linkml_runtime.linkml_model import ( SlotDefinition, TypeDefinition, ) -from pydantic import BaseModel +from pydantic import BaseModel, 
PrivateAttr from nwb_linkml.logging import init_logger from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema @@ -103,6 +103,7 @@ class Adapter(BaseModel): _logger: Optional[Logger] = None _debug: Optional[bool] = None + _nwb_classes: dict[str, Dataset | Group] = PrivateAttr(default_factory=dict) @property def debug(self) -> bool: @@ -135,7 +136,10 @@ class Adapter(BaseModel): Convenience wrapper around :meth:`.walk_field_values` """ - return next(self.walk_field_values(self, "neurodata_type_def", name)) + if name not in self._nwb_classes: + cls = next(self.walk_field_values(self, "neurodata_type_def", name)) + self._nwb_classes[name] = cls + return self._nwb_classes[name] def get_model_with_field(self, field: str) -> Generator[Union[Group, Dataset], None, None]: """ diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index f0b0053..39d4450 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -616,7 +616,8 @@ class MapNVectors(DatasetMap): DynamicTable (and the slot VectorData where this is called for) is handled specially and just dropped, because we handle the possibility for - arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes. + arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes + (see :class:`.MapNVectorData` ). So really this is just a handler for the `Images` case """ @@ -652,6 +653,40 @@ class MapNVectors(DatasetMap): return res +class MapNVectorData(DatasetMap): + """ + An extremely special case just for DynamicTable: + DynamicTable indicates that all of its extra columns are ``VectorData`` with an + unnamed, * quantity dataset similar to the case of :class:`.MapNVectors` . 
+ + We handle this with the :mod:`.includes.hdmf` module mixin classes instead, + and so to avoid generating a pointless slot and class, + we just catch that case and return nothing. + """ + + @classmethod + def check(c, cls: Dataset) -> bool: + """ + Check for being an unnamed multivalued vector class that IS VectorData + """ + return ( + cls.name is None + and cls.neurodata_type_def is None + and cls.neurodata_type_inc + and cls.neurodata_type_inc == "VectorData" + and cls.quantity in ("*", "+") + ) + + @classmethod + def apply( + c, cls: Dataset, res: Optional[BuildResult] = None, name: Optional[str] = None + ) -> BuildResult: + """ + Return ... nothing + """ + return BuildResult() + + class MapCompoundDtype(DatasetMap): """ A ``dtype`` declared as an array of types that function effectively as a row in a table. diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 96d653e..78e3027 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -14,13 +14,13 @@ from typing import Dict, Generator, List, Optional from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.linkml_model import Annotation, SchemaDefinition from pydantic import Field, model_validator -import networkx as nx from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.schema import SchemaAdapter from nwb_linkml.lang_elements import NwbLangSchema from nwb_linkml.ui import AdapterProgress -from nwb_schema_language import Namespaces +from nwb_linkml.util import merge_dicts +from nwb_schema_language import Dataset, Group, Namespaces class NamespacesAdapter(Adapter): @@ -156,7 +156,7 @@ class NamespacesAdapter(Adapter): break return self - def complete_namespaces(self): + def complete_namespaces(self) -> None: """ After loading the namespace, and after any imports have been added afterwards, this must be called to complete the definitions of 
the contained schema objects. @@ -167,7 +167,7 @@ class NamespacesAdapter(Adapter): It **is** automatically called if it hasn't been already by the :meth:`.build` method. """ - self.populate_imports() + self._populate_imports() self._roll_down_inheritance() for i in self.imported: @@ -175,7 +175,7 @@ class NamespacesAdapter(Adapter): self._completed = True - def _roll_down_inheritance(self): + def _roll_down_inheritance(self) -> None: """ nwb-schema-language inheritance doesn't work like normal python inheritance - instead of inheriting everything at the 'top level' of a class, it also @@ -184,21 +184,59 @@ class NamespacesAdapter(Adapter): References: https://github.com/NeurodataWithoutBorders/pynwb/issues/1954 """ - pass + for cls in self.walk_types(self, (Group, Dataset)): + if not cls.neurodata_type_inc: + continue - def inheritance_graph(self) -> nx.DiGraph: - """ - Make a graph of all ``neurodata_types`` in the namespace and imports such that - each node contains the group or dataset it describes, - and has directed edges pointing at all the classes that inherit from it. 
+ # get parents + parent = self.get(cls.neurodata_type_inc) + parents = [parent] + while parent.neurodata_type_inc: + parent = self.get(parent.neurodata_type_inc) + parents.insert(0, parent) + parents.append(cls) - In the case that the inheriting class does not itself have a ``neurodata_type_def``, - it is - """ - g = nx.DiGraph() - for sch in self.all_schemas(): - for cls in sch.created_classes: - pass + # merge and cast + # note that we don't want to exclude_none in the model dump here, + # if the child class has a field completely unset, we want to inherit it + # from the parent without rolling it down - we are only rolling down + # the things that need to be modified/merged in the child + new_cls: dict = {} + for parent in parents: + new_cls = merge_dicts( + new_cls, + parent.model_dump(exclude_unset=True), + list_key="name", + exclude=["neurodata_type_def"], + ) + new_cls: Group | Dataset = type(cls)(**new_cls) + new_cls.parent = cls.parent + + # reinsert + if new_cls.parent: + if isinstance(cls, Dataset): + new_cls.parent.datasets[new_cls.parent.datasets.index(cls)] = new_cls + else: + new_cls.parent.groups[new_cls.parent.groups.index(cls)] = new_cls + else: + # top level class, need to go and find it + found = False + for schema in self.all_schemas(): + if isinstance(cls, Dataset): + if cls in schema.datasets: + schema.datasets[schema.datasets.index(cls)] = new_cls + found = True + break + else: + if cls in schema.groups: + schema.groups[schema.groups.index(cls)] = new_cls + found = True + break + if not found: + raise KeyError( + f"Unable to find source schema for {cls} when reinserting after rolling" + " down!" 
+ ) def find_type_source(self, name: str) -> SchemaAdapter: """ @@ -238,7 +276,7 @@ class NamespacesAdapter(Adapter): else: raise KeyError(f"No schema found that define {name}") - def populate_imports(self) -> "NamespacesAdapter": + def _populate_imports(self) -> "NamespacesAdapter": """ Populate the imports that are needed for each schema file @@ -338,5 +376,5 @@ class NamespacesAdapter(Adapter): for sch in self.schemas: yield sch for imported in self.imported: - for sch in imported: + for sch in imported.schemas: yield sch diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index 8f960c7..065d0d3 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -131,7 +131,7 @@ def load_namespace_adapter( else: adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch) - adapter.populate_imports() + adapter.complete_namespaces() return adapter diff --git a/nwb_linkml/src/nwb_linkml/plot.py b/nwb_linkml/src/nwb_linkml/plot.py index e4cb4c9..e08f536 100644 --- a/nwb_linkml/src/nwb_linkml/plot.py +++ b/nwb_linkml/src/nwb_linkml/plot.py @@ -85,7 +85,7 @@ def make_node( def make_graph(namespaces: "NamespacesAdapter", recurse: bool = True) -> List[CytoElement]: - namespaces.populate_imports() + namespaces.complete_namespaces() nodes = [] element: Namespace | Group | Dataset print("walking graph") diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py index fe8dec5..c106389 100644 --- a/nwb_linkml/src/nwb_linkml/providers/linkml.py +++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py @@ -127,7 +127,7 @@ class LinkMLProvider(Provider): for schema_needs in adapter.needed_imports.values(): for needed in schema_needs: adapter.imported.append(ns_adapters[needed]) - adapter.populate_imports() + adapter.complete_namespaces() # then do the build res = {} diff --git a/nwb_linkml/src/nwb_linkml/util.py b/nwb_linkml/src/nwb_linkml/util.py new file mode 
100644 index 0000000..ca85357 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/util.py @@ -0,0 +1,73 @@ +""" +The much maligned junk drawer +""" + + +def merge_dicts( + source: dict, target: dict, list_key: str | None = None, exclude: list[str] | None = None +) -> dict: + """ + Deeply merge nested dictionaries, replacing already-declared keys rather than + e.g. merging lists as well + + Args: + source (dict): source dictionary + target (dict): target dictionary (values merged over source) + list_key (str | None): Optional: if present, merge lists of dicts using this to + identify matching dicts + exclude: (list[str] | None): Optional: if present, exclude keys from parent. + + References: + https://stackoverflow.com/a/20666342/13113166 + + """ + if exclude is None: + exclude = [] + ret = {k: v for k, v in source.items() if k not in exclude} + for key, value in target.items(): + if key not in ret: + ret[key] = value + elif isinstance(value, dict): + if key in ret: + ret[key] = merge_dicts(ret[key], value, list_key, exclude) + else: + ret[key] = value + elif isinstance(value, list) and list_key and all([isinstance(v, dict) for v in value]): + src_keys = {v[list_key]: ret[key].index(v) for v in ret.get(key, {}) if list_key in v} + target_keys = {v[list_key]: value.index(v) for v in value if list_key in v} + + # all dicts not in target + # screwy double iteration to preserve dict order + new_val = [ + ret[key][src_keys[k]] + for k in src_keys + if k in set(src_keys.keys()) - set(target_keys.keys()) + ] + # all dicts not in source + new_val.extend( + [ + value[target_keys[k]] + for k in target_keys + if k in set(target_keys.keys()) - set(src_keys.keys()) + ] + ) + # merge dicts in both + new_val.extend( + [ + merge_dicts(ret[key][src_keys[k]], value[target_keys[k]], list_key, exclude) + for k in target_keys + if k in set(src_keys.keys()).intersection(set(target_keys.keys())) + ] + ) + new_val = sorted(new_val, key=lambda i: i[list_key]) + # add any dicts that don't have the 
list_key + # they can't be merged since they can't be matched + new_val.extend([v for v in ret.get(key, {}) if list_key not in v]) + new_val.extend([v for v in value if list_key not in v]) + + ret[key] = new_val + + else: + ret[key] = value + + return ret diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py index 768669b..2052778 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py @@ -1,7 +1,9 @@ -import pytest from pathlib import Path + +import pytest + from nwb_linkml.adapters import NamespacesAdapter, SchemaAdapter -from nwb_schema_language import Attribute, Group, Namespace, Dataset, Namespaces, Schema, FlatDtype +from nwb_schema_language import Attribute, Dataset, FlatDtype, Group, Namespace, Namespaces, Schema @pytest.mark.parametrize( @@ -20,7 +22,7 @@ def test_find_type_source(nwb_core_fixture, class_name, schema_file, namespace_n def test_populate_imports(nwb_core_fixture): - nwb_core_fixture.populate_imports() + nwb_core_fixture._populate_imports() schema: SchemaAdapter assert len(nwb_core_fixture.schemas) > 0 for schema in nwb_core_fixture.schemas: @@ -97,14 +99,15 @@ def test_roll_down_inheritance(): neurodata_type_def="Child", neurodata_type_inc="Parent", doc="child", - attributes=[Attribute(name="a", doc="a")], + attributes=[Attribute(name="a", doc="a", value="z")], datasets=[ Dataset( name="data", doc="data again", - attributes=[Attribute(name="a", doc="c", value="z"), Attribute(name="c", doc="c")], - ) + attributes=[Attribute(name="c", doc="c", value="z"), Attribute(name="e", doc="e")], + ), ], + groups=[Group(name="untyped_child", neurodata_type_inc="Parent", doc="untyped child")], ) child_sch = Schema(source="child.yaml") child_ns = Namespaces( @@ -130,3 +133,30 @@ def test_roll_down_inheritance(): child_ns_adapter.complete_namespaces() child = child_ns_adapter.get("Child") + # overrides 
simple attrs + assert child.doc == "child" + # gets unassigned parent attrs + assert "b" in [attr.name for attr in child.attributes] + # overrides values while preserving remaining values when set + attr_a = [attr for attr in child.attributes if attr.name == "a"][0] + assert attr_a.value == "z" + assert attr_a.dims == parent_cls.attributes[0].dims + assert [attr.value for attr in child.attributes if attr.name == "a"][0] == "z" + + # preserve unset values in child datasets + assert child.datasets[0].dtype == parent_cls.datasets[0].dtype + assert child.datasets[0].dims == parent_cls.datasets[0].dims + # gets undeclared attrs in child datasets + assert "d" in [attr.name for attr in child.datasets[0].attributes] + # overrides set values in child datasets while preserving unset + c_attr = [attr for attr in child.datasets[0].attributes if attr.name == "c"][0] + assert c_attr.value == "z" + assert c_attr.dtype == FlatDtype.int32 + # preserves new attrs + assert "e" in [attr.name for attr in child.datasets[0].attributes] + + # neurodata_type_def is not included in untyped children + assert child.groups[0].neurodata_type_def is None + # we don't set any of the attrs from the parent class here because we don't override them, + # so we don't need to merge them, and we don't want to clutter our linkml models unnecessarily + assert child.groups[0].attributes is None diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py index e69de29..8b13789 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py index d1bbac3..ca7e8be 100644 --- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py +++ 
b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py @@ -81,7 +81,10 @@ linkml_meta = LinkMLMeta( "see_also": ["https://p2p_ld.github.io/nwb-schema-language"], "settings": { "email": {"setting_key": "email", "setting_value": "\\S+@\\S+{\\.\\w}+"}, - "protected_string": {"setting_key": "protected_string", "setting_value": "^[A-Za-z_][A-Za-z0-9_]*$"}, + "protected_string": { + "setting_key": "protected_string", + "setting_value": "^[A-Za-z_][A-Za-z0-9_]*$", + }, }, "source_file": "/Users/jonny/git/p2p-ld/nwb-linkml/nwb_schema_language/src/nwb_schema_language/schema/nwb_schema_language.yaml", "title": "nwb-schema-language", @@ -180,7 +183,15 @@ class Namespace(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) @@ -189,7 +200,14 @@ class Namespace(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, } }, @@ -199,7 +217,9 @@ class Namespace(ConfiguredBaseModel): description="""Optional string with extended full name for the namespace.""", json_schema_extra={"linkml_meta": {"alias": "full_name", "domain_of": ["Namespace"]}}, ) - version: str = Field(..., json_schema_extra={"linkml_meta": {"alias": "version", "domain_of": ["Namespace"]}}) + version: str = Field( + ..., json_schema_extra={"linkml_meta": {"alias": "version", "domain_of": ["Namespace"]}} + ) date: Optional[datetime] = Field( None, description="""Date that a namespace was last modified or released""", @@ -215,7 +235,13 @@ class 
Namespace(ConfiguredBaseModel): author: List[str] | str = Field( ..., description="""List of strings with the names of the authors of the namespace.""", - json_schema_extra={"linkml_meta": {"alias": "author", "domain_of": ["Namespace"], "slot_uri": "schema:author"}}, + json_schema_extra={ + "linkml_meta": { + "alias": "author", + "domain_of": ["Namespace"], + "slot_uri": "schema:author", + } + }, ) contact: List[str] | str = Field( ..., @@ -238,10 +264,13 @@ class Namespace(ConfiguredBaseModel): class Namespaces(ConfiguredBaseModel): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) namespaces: Optional[List[Namespace]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "namespaces", "domain_of": ["Namespaces"]}} + None, + json_schema_extra={"linkml_meta": {"alias": "namespaces", "domain_of": ["Namespaces"]}}, ) @@ -252,29 +281,51 @@ class Schema(ConfiguredBaseModel): "rules": [ { "description": "If namespace is absent, source is required", - "postconditions": {"slot_conditions": {"source": {"name": "source", "required": True}}}, + "postconditions": { + "slot_conditions": {"source": {"name": "source", "required": True}} + }, "preconditions": { - "slot_conditions": {"namespace": {"name": "namespace", "value_presence": "ABSENT"}} + "slot_conditions": { + "namespace": {"name": "namespace", "value_presence": "ABSENT"} + } }, }, { "description": "If source is absent, namespace is required.", - "postconditions": {"slot_conditions": {"namespace": {"name": "namespace", "required": True}}}, - "preconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "ABSENT"}}}, + "postconditions": { + "slot_conditions": {"namespace": {"name": "namespace", "required": True}} + }, + "preconditions": { + "slot_conditions": { + "source": {"name": "source", "value_presence": 
"ABSENT"} + } + }, }, { "description": "If namespace is present, source is cannot be", - "postconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "ABSENT"}}}, + "postconditions": { + "slot_conditions": { + "source": {"name": "source", "value_presence": "ABSENT"} + } + }, "preconditions": { - "slot_conditions": {"namespace": {"name": "namespace", "value_presence": "PRESENT"}} + "slot_conditions": { + "namespace": {"name": "namespace", "value_presence": "PRESENT"} + } }, }, { "description": "If source is present, namespace cannot be.", "postconditions": { - "slot_conditions": {"namespace": {"name": "namespace", "value_presence": "ABSENT"}} + "slot_conditions": { + "namespace": {"name": "namespace", "value_presence": "ABSENT"} + } + }, + "preconditions": { + "slot_conditions": { + "source": {"name": "source", "value_presence": "PRESENT"} + } }, - "preconditions": {"slot_conditions": {"source": {"name": "source", "value_presence": "PRESENT"}}}, }, ], } @@ -311,14 +362,24 @@ class Schema(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) class Group(ConfiguredBaseModel, ParentizeMixin): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) neurodata_type_def: Optional[str] = Field( None, @@ -347,7 +408,14 @@ class Group(ConfiguredBaseModel, ParentizeMixin): json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": 
{"interpolated": True, "syntax": "{protected_string}"}, } }, @@ -368,7 +436,15 @@ class Group(ConfiguredBaseModel, ParentizeMixin): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) @@ -380,21 +456,32 @@ class Group(ConfiguredBaseModel, ParentizeMixin): "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], "domain_of": ["Group", "Link", "Dataset"], "ifabsent": "int(1)", - "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + "todos": [ + "logic to check that the corresponding class can only be " + "implemented quantity times." + ], } }, ) linkable: Optional[bool] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}} + None, + json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}}, ) attributes: Optional[List[Attribute]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}} + None, + json_schema_extra={ + "linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]} + }, ) datasets: Optional[List[Dataset]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}} + None, + json_schema_extra={ + "linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]} + }, ) groups: Optional[List[Group]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}} + None, + json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}}, ) links: Optional[List[Link]] = Field( None, json_schema_extra={"linkml_meta": {"alias": "links", "domain_of": ["Group"]}} @@ -403,27 +490,41 @@ class 
Group(ConfiguredBaseModel, ParentizeMixin): None, exclude=True, description="""The parent group that contains this dataset or group""", - json_schema_extra={"linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]} + }, ) class Groups(ConfiguredBaseModel): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) groups: Optional[List[Group]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}} + None, + json_schema_extra={"linkml_meta": {"alias": "groups", "domain_of": ["Group", "Groups"]}}, ) class Link(ConfiguredBaseModel): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) name: Optional[str] = Field( None, json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, } }, @@ -434,14 +535,24 @@ class Link(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) target_type: str = Field( ..., description="""Describes the neurodata_type of the target that the reference points to""", - json_schema_extra={"linkml_meta": {"alias": 
"target_type", "domain_of": ["Link", "ReferenceDtype"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]} + }, ) quantity: Optional[Union[QuantityEnum, int]] = Field( "1", @@ -451,27 +562,39 @@ class Link(ConfiguredBaseModel): "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], "domain_of": ["Group", "Link", "Dataset"], "ifabsent": "int(1)", - "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + "todos": [ + "logic to check that the corresponding class can only be " + "implemented quantity times." + ], } }, ) class Datasets(ConfiguredBaseModel): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) datasets: Optional[List[Dataset]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]}} + None, + json_schema_extra={ + "linkml_meta": {"alias": "datasets", "domain_of": ["Group", "Datasets"]} + }, ) class ReferenceDtype(ConfiguredBaseModel): - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"}) + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://w3id.org/p2p_ld/nwb-schema-language"} + ) target_type: str = Field( ..., description="""Describes the neurodata_type of the target that the reference points to""", - json_schema_extra={"linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "target_type", "domain_of": ["Link", "ReferenceDtype"]} + }, ) reftype: Optional[ReftypeOptions] = Field( None, @@ -501,7 +624,14 @@ class CompoundDtype(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", 
"Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, } }, @@ -512,7 +642,15 @@ class CompoundDtype(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) @@ -535,8 +673,12 @@ class DtypeMixin(ConfiguredBaseModel): "mixin": True, "rules": [ { - "postconditions": {"slot_conditions": {"dtype": {"multivalued": False, "name": "dtype"}}}, - "preconditions": {"slot_conditions": {"dtype": {"name": "dtype", "range": "FlatDtype"}}}, + "postconditions": { + "slot_conditions": {"dtype": {"multivalued": False, "name": "dtype"}} + }, + "preconditions": { + "slot_conditions": {"dtype": {"name": "dtype", "range": "FlatDtype"}} + }, } ], } @@ -547,7 +689,11 @@ class DtypeMixin(ConfiguredBaseModel): json_schema_extra={ "linkml_meta": { "alias": "dtype", - "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "any_of": [ + {"range": "FlatDtype"}, + {"range": "CompoundDtype"}, + {"range": "ReferenceDtype"}, + ], "domain_of": ["CompoundDtype", "DtypeMixin"], } }, @@ -571,7 +717,14 @@ class Attribute(DtypeMixin): json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": {"interpolated": True, "syntax": "{protected_string}"}, } }, @@ -611,12 +764,16 @@ class Attribute(DtypeMixin): value: Optional[Any] = Field( None, description="""Optional constant, fixed value for the attribute.""", - json_schema_extra={"linkml_meta": 
{"alias": "value", "domain_of": ["Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]} + }, ) default_value: Optional[Any] = Field( None, description="""Optional default value for variable-valued attributes.""", - json_schema_extra={"linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]} + }, ) doc: str = Field( ..., @@ -624,14 +781,24 @@ class Attribute(DtypeMixin): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) required: Optional[bool] = Field( True, description="""Optional boolean key describing whether the attribute is required. Default value is True.""", - json_schema_extra={"linkml_meta": {"alias": "required", "domain_of": ["Attribute"], "ifabsent": "true"}}, + json_schema_extra={ + "linkml_meta": {"alias": "required", "domain_of": ["Attribute"], "ifabsent": "true"} + }, ) parent: Optional[Union[Dataset, Group]] = Field( None, @@ -650,7 +817,11 @@ class Attribute(DtypeMixin): json_schema_extra={ "linkml_meta": { "alias": "dtype", - "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "any_of": [ + {"range": "FlatDtype"}, + {"range": "CompoundDtype"}, + {"range": "ReferenceDtype"}, + ], "domain_of": ["CompoundDtype", "DtypeMixin"], } }, @@ -689,7 +860,14 @@ class Dataset(ConfiguredBaseModel, ParentizeMixin): json_schema_extra={ "linkml_meta": { "alias": "name", - "domain_of": ["Namespace", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], "structured_pattern": {"interpolated": True, 
"syntax": "{protected_string}"}, } }, @@ -739,12 +917,16 @@ class Dataset(ConfiguredBaseModel, ParentizeMixin): value: Optional[Any] = Field( None, description="""Optional constant, fixed value for the attribute.""", - json_schema_extra={"linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "value", "domain_of": ["Attribute", "Dataset"]} + }, ) default_value: Optional[Any] = Field( None, description="""Optional default value for variable-valued attributes.""", - json_schema_extra={"linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "default_value", "domain_of": ["Attribute", "Dataset"]} + }, ) doc: str = Field( ..., @@ -752,7 +934,15 @@ class Dataset(ConfiguredBaseModel, ParentizeMixin): json_schema_extra={ "linkml_meta": { "alias": "doc", - "domain_of": ["Namespace", "Schema", "Group", "Attribute", "Link", "Dataset", "CompoundDtype"], + "domain_of": [ + "Namespace", + "Schema", + "Group", + "Attribute", + "Link", + "Dataset", + "CompoundDtype", + ], } }, ) @@ -764,28 +954,41 @@ class Dataset(ConfiguredBaseModel, ParentizeMixin): "any_of": [{"minimum_value": 1, "range": "integer"}, {"range": "QuantityEnum"}], "domain_of": ["Group", "Link", "Dataset"], "ifabsent": "int(1)", - "todos": ["logic to check that the corresponding class can only be " "implemented quantity times."], + "todos": [ + "logic to check that the corresponding class can only be " + "implemented quantity times." 
+ ], } }, ) linkable: Optional[bool] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}} + None, + json_schema_extra={"linkml_meta": {"alias": "linkable", "domain_of": ["Group", "Dataset"]}}, ) attributes: Optional[List[Attribute]] = Field( - None, json_schema_extra={"linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]}} + None, + json_schema_extra={ + "linkml_meta": {"alias": "attributes", "domain_of": ["Group", "Dataset"]} + }, ) parent: Optional[Group] = Field( None, exclude=True, description="""The parent group that contains this dataset or group""", - json_schema_extra={"linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]}}, + json_schema_extra={ + "linkml_meta": {"alias": "parent", "domain_of": ["Group", "Attribute", "Dataset"]} + }, ) dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( None, json_schema_extra={ "linkml_meta": { "alias": "dtype", - "any_of": [{"range": "FlatDtype"}, {"range": "CompoundDtype"}, {"range": "ReferenceDtype"}], + "any_of": [ + {"range": "FlatDtype"}, + {"range": "CompoundDtype"}, + {"range": "ReferenceDtype"}, + ], "domain_of": ["CompoundDtype", "DtypeMixin"], } }, diff --git a/nwb_schema_language/src/nwb_schema_language/generator.py b/nwb_schema_language/src/nwb_schema_language/generator.py index eefad6b..38519a4 100644 --- a/nwb_schema_language/src/nwb_schema_language/generator.py +++ b/nwb_schema_language/src/nwb_schema_language/generator.py @@ -1,5 +1,9 @@ -from pathlib import Path +""" +Customization of linkml pydantic generator +""" + from dataclasses import dataclass +from pathlib import Path from linkml.generators.pydanticgen import PydanticGenerator from linkml.generators.pydanticgen.build import ClassResult @@ -9,9 +13,10 @@ from pydantic import BaseModel, model_validator class ParentizeMixin(BaseModel): + """Mixin to populate the parent field for nested datasets and groups""" 
@model_validator(mode="after") - def parentize(self): + def parentize(self) -> BaseModel: """Set the parent attribute for all our fields they have one""" for field_name in self.model_fields: if field_name == "parent": @@ -28,6 +33,9 @@ class ParentizeMixin(BaseModel): @dataclass class NWBSchemaLangGenerator(PydanticGenerator): + """ + Customization of linkml pydantic generator + """ def __init__(self, *args, **kwargs): kwargs["injected_classes"] = [ParentizeMixin] @@ -38,12 +46,18 @@ class NWBSchemaLangGenerator(PydanticGenerator): super().__init__(*args, **kwargs) def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult: + """ + Add the ParentizeMixin to the bases of Dataset and Group + """ if cls.cls.name in ("Dataset", "Group"): cls.cls.bases = ["ConfiguredBaseModel", "ParentizeMixin"] return cls -def generate(): +def generate() -> None: + """ + Generate pydantic models for nwb_schema_language + """ schema = Path(__file__).parent / "schema" / "nwb_schema_language.yaml" output = Path(__file__).parent / "datamodel" / "nwb_schema_pydantic.py" generator = NWBSchemaLangGenerator(schema=schema) From cad57554fd04095fa49ffecb1ca7d122258d7891 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Fri, 13 Sep 2024 23:05:34 -0700 Subject: [PATCH 03/18] get ting there, working rolldown of extra attributes, but something still funny in patchclampseries children w.r.t. 
losing attributes in data --- .../src/nwb_linkml/adapters/namespaces.py | 266 ++++++++++++------ .../src/nwb_linkml/generators/pydantic.py | 15 +- nwb_linkml/src/nwb_linkml/util.py | 73 ----- .../datamodel/nwb_schema_pydantic.py | 11 +- .../src/nwb_schema_language/generator.py | 20 +- .../src/nwb_schema_language/util.py | 39 +++ scripts/generate_core.py | 5 +- 7 files changed, 264 insertions(+), 165 deletions(-) delete mode 100644 nwb_linkml/src/nwb_linkml/util.py create mode 100644 nwb_schema_language/src/nwb_schema_language/util.py diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 78e3027..afbb82d 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -8,7 +8,6 @@ for extracting information and generating translated schema import contextlib from copy import copy from pathlib import Path -from pprint import pformat from typing import Dict, Generator, List, Optional from linkml_runtime.dumpers import yaml_dumper @@ -19,7 +18,6 @@ from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.schema import SchemaAdapter from nwb_linkml.lang_elements import NwbLangSchema from nwb_linkml.ui import AdapterProgress -from nwb_linkml.util import merge_dicts from nwb_schema_language import Dataset, Group, Namespaces @@ -188,93 +186,105 @@ class NamespacesAdapter(Adapter): if not cls.neurodata_type_inc: continue - # get parents - parent = self.get(cls.neurodata_type_inc) - parents = [parent] - while parent.neurodata_type_inc: - parent = self.get(parent.neurodata_type_inc) - parents.insert(0, parent) - parents.append(cls) + parents = self._get_class_ancestors(cls, include_child=True) # merge and cast - # note that we don't want to exclude_none in the model dump here, - # if the child class has a field completely unset, we want to inherit it - # from the parent without rolling it down - we are only rolling down - # the 
things that need to be modified/merged in the child new_cls: dict = {} - for parent in parents: - new_cls = merge_dicts( - new_cls, - parent.model_dump(exclude_unset=True), - list_key="name", - exclude=["neurodata_type_def"], - ) + for i, parent in enumerate(parents): + # if parent.neurodata_type_def == "PatchClampSeries": + # pdb.set_trace() + complete = True + if i == len(parents) - 1: + complete = False + new_cls = roll_down_nwb_class(new_cls, parent, complete=complete) new_cls: Group | Dataset = type(cls)(**new_cls) new_cls.parent = cls.parent # reinsert - if new_cls.parent: - if isinstance(cls, Dataset): - new_cls.parent.datasets[new_cls.parent.datasets.index(cls)] = new_cls - else: - new_cls.parent.groups[new_cls.parent.groups.index(cls)] = new_cls + self._overwrite_class(new_cls, cls) + + def _get_class_ancestors( + self, cls: Dataset | Group, include_child: bool = True + ) -> list[Dataset | Group]: + """ + Get the chain of ancestor classes inherited via ``neurodata_type_inc`` + + Args: + cls (:class:`.Dataset` | :class:`.Group`): The class to get ancestors of + include_child (bool): If ``True`` (default), include ``cls`` in the output list + """ + parent = self.get(cls.neurodata_type_inc) + parents = [parent] + while parent.neurodata_type_inc: + parent = self.get(parent.neurodata_type_inc) + parents.insert(0, parent) + + if include_child: + parents.append(cls) + + return parents + + def _overwrite_class(self, new_cls: Dataset | Group, old_cls: Dataset | Group): + """ + Overwrite the version of a dataset or group that is stored in our schemas + """ + if old_cls.parent: + if isinstance(old_cls, Dataset): + new_cls.parent.datasets[new_cls.parent.datasets.index(old_cls)] = new_cls else: - # top level class, need to go and find it - found = False - for schema in self.all_schemas(): - if isinstance(cls, Dataset): - if cls in schema.datasets: - schema.datasets[schema.datasets.index(cls)] = new_cls - found = True - break - else: - if cls in schema.groups: - 
schema.groups[schema.groups.index(cls)] = new_cls - found = True - break - if not found: - raise KeyError( - f"Unable to find source schema for {cls} when reinserting after rolling" - " down!" - ) - - def find_type_source(self, name: str) -> SchemaAdapter: - """ - Given some neurodata_type_inc, find the schema that it's defined in. - - Rather than returning as soon as a match is found, check all - """ - # First check within the main schema - internal_matches = [] - for schema in self.schemas: - class_names = [cls.neurodata_type_def for cls in schema.created_classes] - if name in class_names: - internal_matches.append(schema) - - if len(internal_matches) > 1: - raise KeyError( - f"Found multiple schemas in namespace that define {name}:\ninternal:" - f" {pformat(internal_matches)}\nimported:{pformat(internal_matches)}" - ) - elif len(internal_matches) == 1: - return internal_matches[0] - - import_matches = [] - for imported_ns in self.imported: - for schema in imported_ns.schemas: - class_names = [cls.neurodata_type_def for cls in schema.created_classes] - if name in class_names: - import_matches.append(schema) - - if len(import_matches) > 1: - raise KeyError( - f"Found multiple schemas in namespace that define {name}:\ninternal:" - f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}" - ) - elif len(import_matches) == 1: - return import_matches[0] + new_cls.parent.groups[new_cls.parent.groups.index(old_cls)] = new_cls else: - raise KeyError(f"No schema found that define {name}") + # top level class, need to go and find it + schema = self.find_type_source(old_cls) + if isinstance(new_cls, Dataset): + schema.datasets[schema.datasets.index(old_cls)] = new_cls + else: + schema.groups[schema.groups.index(old_cls)] = new_cls + + def find_type_source(self, cls: str | Dataset | Group, fast: bool = False) -> SchemaAdapter: + """ + Given some type (as `neurodata_type_def`), find the schema that it's defined in. 
+ + Rather than returning as soon as a match is found, ensure that duplicates are + not found within the primary schema, then so the same for all imported schemas. + + Args: + cls (str | :class:`.Dataset` | :class:`.Group`): The ``neurodata_type_def`` + to look for the source of. If a Dataset or Group, look for the object itself + (cls in schema.datasets), otherwise look for a class with a matching name. + fast (bool): If ``True``, return as soon as a match is found. + If ``False`, return after checking all schemas for duplicates. + + Returns: + :class:`.SchemaAdapter` + + Raises: + KeyError: if multiple schemas or no schemas are found + """ + matches = [] + for schema in self.all_schemas(): + in_schema = False + if isinstance(cls, str) and cls in [ + c.neurodata_type_def for c in schema.created_classes + ]: + in_schema = True + elif isinstance(cls, Dataset) and cls in schema.datasets: + in_schema = True + elif isinstance(cls, Group) and cls in schema.groups: + in_schema = True + + if in_schema: + if fast: + return schema + else: + matches.append(schema) + + if len(matches) > 1: + raise KeyError(f"Found multiple schemas in namespace that define {cls}:\n{matches}") + elif len(matches) == 1: + return matches[0] + else: + raise KeyError(f"No schema found that define {cls}") def _populate_imports(self) -> "NamespacesAdapter": """ @@ -378,3 +388,99 @@ class NamespacesAdapter(Adapter): for imported in self.imported: for sch in imported.schemas: yield sch + + +def roll_down_nwb_class( + source: Group | Dataset | dict, target: Group | Dataset | dict, complete: bool = False +) -> dict: + """ + Merge an ancestor (via ``neurodata_type_inc`` ) source class with a + child ``target`` class. 
+ + On the first recurive pass, only those values that are set on the target are copied from the + source class - this isn't a true merging, what we are after is to recursively merge all the + values that are modified in the child class with those of the parent class below the top level, + the top-level attributes will be carried through via normal inheritance. + + Rather than re-instantiating the child class, we return the dictionary so that this + function can be used in series to merge a whole ancestry chain within + :class:`.NamespacesAdapter` , but this isn't exposed in the function since + class definitions can be spread out over many schemas, and we need the orchestration + of the adapter to have them in all cases we'd be using this. + + Args: + source (dict): source dictionary + target (dict): target dictionary (values merged over source) + complete (bool): (default ``False``)do a complete merge, merging everything + from source to target without trying to minimize redundancy. + Used to collapse ancestor classes before the terminal class. 
+ + References: + https://github.com/NeurodataWithoutBorders/pynwb/issues/1954 + + """ + if isinstance(source, (Group, Dataset)): + source = source.model_dump(exclude_unset=True, exclude_none=True) + if isinstance(target, (Group, Dataset)): + target = target.model_dump(exclude_unset=True, exclude_none=True) + + exclude = ("neurodata_type_def",) + + # if we are on the first recursion, we exclude top-level items that are not set in the target + if complete: + ret = {k: v for k, v in source.items() if k not in exclude} + else: + ret = {k: v for k, v in source.items() if k not in exclude and k in target} + + for key, value in target.items(): + if key not in ret: + ret[key] = value + elif isinstance(value, dict): + if key in ret: + ret[key] = roll_down_nwb_class(ret[key], value, complete=True) + else: + ret[key] = value + elif isinstance(value, list) and all([isinstance(v, dict) for v in value]): + src_keys = {v["name"]: ret[key].index(v) for v in ret.get(key, {}) if "name" in v} + target_keys = {v["name"]: value.index(v) for v in value if "name" in v} + + new_val = [] + # screwy double iteration to preserve dict order + # all dicts not in target, if in depth > 0 + if complete: + new_val.extend( + [ + ret[key][src_keys[k]] + for k in src_keys + if k in set(src_keys.keys()) - set(target_keys.keys()) + ] + ) + # all dicts not in source + new_val.extend( + [ + value[target_keys[k]] + for k in target_keys + if k in set(target_keys.keys()) - set(src_keys.keys()) + ] + ) + # merge dicts in both + new_val.extend( + [ + roll_down_nwb_class(ret[key][src_keys[k]], value[target_keys[k]], complete=True) + for k in target_keys + if k in set(src_keys.keys()).intersection(set(target_keys.keys())) + ] + ) + new_val = sorted(new_val, key=lambda i: i["name"]) + # add any dicts that don't have the list_key + # they can't be merged since they can't be matched + if complete: + new_val.extend([v for v in ret.get(key, {}) if "name" not in v]) + new_val.extend([v for v in value if "name" not 
in v]) + + ret[key] = new_val + + else: + ret[key] = value + + return ret diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 1928cf5..927e9c2 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -136,7 +136,7 @@ class NWBPydanticGenerator(PydanticGenerator): """Customize dynamictable behavior""" cls = AfterGenerateClass.inject_dynamictable(cls) cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv) - cls = AfterGenerateClass.inject_elementidentifiers(cls, sv, self._get_element_import) + cls = AfterGenerateClass.inject_dynamictable_imports(cls, sv, self._get_element_import) cls = AfterGenerateClass.strip_vector_data_slots(cls, sv) return cls @@ -346,19 +346,22 @@ class AfterGenerateClass: return cls @staticmethod - def inject_elementidentifiers( + def inject_dynamictable_imports( cls: ClassResult, sv: SchemaView, import_method: Callable[[str], Import] ) -> ClassResult: """ - Inject ElementIdentifiers into module that define dynamictables - - needed to handle ID columns + Ensure that schema that contain dynamictables have all the imports needed to use them """ if ( cls.source.is_a == "DynamicTable" or "DynamicTable" in sv.class_ancestors(cls.source.name) ) and sv.schema.name != "hdmf-common.table": - imp = import_method("ElementIdentifiers") - cls.imports += [imp] + imp = [ + import_method("ElementIdentifiers"), + import_method("VectorData"), + import_method("VectorIndex"), + ] + cls.imports += imp return cls @staticmethod diff --git a/nwb_linkml/src/nwb_linkml/util.py b/nwb_linkml/src/nwb_linkml/util.py deleted file mode 100644 index ca85357..0000000 --- a/nwb_linkml/src/nwb_linkml/util.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -The much maligned junk drawer -""" - - -def merge_dicts( - source: dict, target: dict, list_key: str | None = None, exclude: list[str] | None = None -) -> dict: - """ - Deeply merge nested dictionaries, 
replacing already-declared keys rather than - e.g. merging lists as well - - Args: - source (dict): source dictionary - target (dict): target dictionary (values merged over source) - list_key (str | None): Optional: if present, merge lists of dicts using this to - identify matching dicts - exclude: (list[str] | None): Optional: if present, exclude keys from parent. - - References: - https://stackoverflow.com/a/20666342/13113166 - - """ - if exclude is None: - exclude = [] - ret = {k: v for k, v in source.items() if k not in exclude} - for key, value in target.items(): - if key not in ret: - ret[key] = value - elif isinstance(value, dict): - if key in ret: - ret[key] = merge_dicts(ret[key], value, list_key, exclude) - else: - ret[key] = value - elif isinstance(value, list) and list_key and all([isinstance(v, dict) for v in value]): - src_keys = {v[list_key]: ret[key].index(v) for v in ret.get(key, {}) if list_key in v} - target_keys = {v[list_key]: value.index(v) for v in value if list_key in v} - - # all dicts not in target - # screwy double iteration to preserve dict order - new_val = [ - ret[key][src_keys[k]] - for k in src_keys - if k in set(src_keys.keys()) - set(target_keys.keys()) - ] - # all dicts not in source - new_val.extend( - [ - value[target_keys[k]] - for k in target_keys - if k in set(target_keys.keys()) - set(src_keys.keys()) - ] - ) - # merge dicts in both - new_val.extend( - [ - merge_dicts(ret[key][src_keys[k]], value[target_keys[k]], list_key, exclude) - for k in target_keys - if k in set(src_keys.keys()).intersection(set(target_keys.keys())) - ] - ) - new_val = sorted(new_val, key=lambda i: i[list_key]) - # add any dicts that don't have the list_key - # they can't be merged since they can't be matched - new_val.extend([v for v in ret.get(key, {}) if list_key not in v]) - new_val.extend([v for v in value if list_key not in v]) - - ret[key] = new_val - - else: - ret[key] = value - - return ret diff --git 
a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py index ca7e8be..83f084c 100644 --- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py +++ b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py @@ -7,6 +7,7 @@ from decimal import Decimal from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union +from nwb_schema_language.util import pformat from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator @@ -23,7 +24,12 @@ class ConfiguredBaseModel(BaseModel): use_enum_values=True, strict=False, ) - pass + + def __repr__(self): + return pformat(self.model_dump(exclude={"parent": True}), self.__class__.__name__) + + def __str__(self): + return repr(self) class LinkMLMeta(RootModel): @@ -44,9 +50,10 @@ class LinkMLMeta(RootModel): class ParentizeMixin(BaseModel): + """Mixin to populate the parent field for nested datasets and groups""" @model_validator(mode="after") - def parentize(self): + def parentize(self) -> BaseModel: """Set the parent attribute for all our fields they have one""" for field_name in self.model_fields: if field_name == "parent": diff --git a/nwb_schema_language/src/nwb_schema_language/generator.py b/nwb_schema_language/src/nwb_schema_language/generator.py index 38519a4..7b0d289 100644 --- a/nwb_schema_language/src/nwb_schema_language/generator.py +++ b/nwb_schema_language/src/nwb_schema_language/generator.py @@ -31,6 +31,22 @@ class ParentizeMixin(BaseModel): return self +STR_METHOD = """ + def __repr__(self): + return pformat( + self.model_dump( + exclude={"parent": True}, + exclude_unset=True, + exclude_none=True + ), + self.__class__.__name__ + ) + + def __str__(self): + return repr(self) +""" + + @dataclass class NWBSchemaLangGenerator(PydanticGenerator): """ @@ -40,8 +56,10 @@ class 
NWBSchemaLangGenerator(PydanticGenerator): def __init__(self, *args, **kwargs): kwargs["injected_classes"] = [ParentizeMixin] kwargs["imports"] = [ - Import(module="pydantic", objects=[ObjectImport(name="model_validator")]) + Import(module="pydantic", objects=[ObjectImport(name="model_validator")]), + Import(module="nwb_schema_language.util", objects=[ObjectImport(name="pformat")]), ] + kwargs["injected_fields"] = [STR_METHOD] kwargs["black"] = True super().__init__(*args, **kwargs) diff --git a/nwb_schema_language/src/nwb_schema_language/util.py b/nwb_schema_language/src/nwb_schema_language/util.py new file mode 100644 index 0000000..61bc5ed --- /dev/null +++ b/nwb_schema_language/src/nwb_schema_language/util.py @@ -0,0 +1,39 @@ +from pprint import pformat as _pformat +import textwrap +import re + + +def pformat(fields: dict, cls_name: str, indent: str = " ") -> str: + """ + pretty format the fields of the items of a ``YAMLRoot`` object without the wonky indentation of pformat. + see ``YAMLRoot.__repr__``. + + formatting is similar to black - items at similar levels of nesting have similar levels of indentation, + rather than getting placed at essentially random levels of indentation depending on what came before them. 
+ """ + res = [] + total_len = 0 + for key, val in fields.items(): + if val == [] or val == {} or val is None: + continue + # pformat handles everything else that isn't a YAMLRoot object, but it sure does look ugly + # use it to split lines and as the thing of last resort, but otherwise indent = 0, we'll do that + val_str = _pformat(val, indent=0, compact=True, sort_dicts=False) + # now we indent everything except the first line by indenting and then using regex to remove just the first indent + val_str = re.sub(rf"\A{re.escape(indent)}", "", textwrap.indent(val_str, indent)) + # now recombine with the key in a format that can be re-eval'd into an object if indent is just whitespace + val_str = f"'{key}': " + val_str + + # count the total length of this string so we know if we need to linebreak or not later + total_len += len(val_str) + res.append(val_str) + + if total_len > 80: + inside = ",\n".join(res) + # we indent twice - once for the inner contents of every inner object, and one to + # offset from the root element. 
that keeps us from needing to be recursive except for the + # single pformat call + inside = textwrap.indent(inside, indent) + return cls_name + "({\n" + inside + "\n})" + else: + return cls_name + "({" + ", ".join(res) + "})" diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 4aeb21a..55fc94e 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -67,7 +67,6 @@ def generate_versions( pydantic_path: Path, dry_run: bool = False, repo: GitRepo = NWB_CORE_REPO, - hdmf_only=False, pdb=False, ): """ @@ -253,10 +252,10 @@ def main(): args.yaml.mkdir(exist_ok=True) args.pydantic.mkdir(exist_ok=True) if args.latest: - generate_core_yaml(args.yaml, args.dry_run, args.hdmf) + generate_core_yaml(args.yaml, args.dry_run) generate_core_pydantic(args.yaml, args.pydantic, args.dry_run) else: - generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf, pdb=args.pdb) + generate_versions(args.yaml, args.pydantic, args.dry_run, repo, pdb=args.pdb) if __name__ == "__main__": From 1d27c6a25955d715c1bbd9e05a8588f48628fd09 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 19 Sep 2024 19:17:59 -0700 Subject: [PATCH 04/18] correctly generating rolled down classes. 
model update to follow --- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 46 ++++++++++++++++++- .../src/nwb_linkml/adapters/attribute.py | 44 ++---------------- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 17 ++++++- .../src/nwb_linkml/adapters/namespaces.py | 18 +++----- nwb_linkml/src/nwb_linkml/maps/dtype.py | 20 ++++++++ nwb_linkml/src/nwb_linkml/maps/quantity.py | 12 +++-- 6 files changed, 99 insertions(+), 58 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index cb16165..1ceb7b5 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -20,6 +20,7 @@ from linkml_runtime.linkml_model import ( from pydantic import BaseModel, PrivateAttr from nwb_linkml.logging import init_logger +from nwb_linkml.maps.dtype import float_types, integer_types, string_types from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema if sys.version_info.minor >= 11: @@ -308,5 +309,48 @@ def has_attrs(cls: Dataset) -> bool: return ( cls.attributes is not None and len(cls.attributes) > 0 - and all([not a.value for a in cls.attributes]) + and any([not a.value for a in cls.attributes]) ) + + +def defaults(cls: Dataset | Attribute) -> dict: + """ + Handle default values - + + * If ``value`` is present, yield `equals_string` or `equals_number` depending on dtype + **as well as** an ``ifabsent`` value - we both constrain the possible values to 1 + and also supply it as the default + * else, if ``default_value`` is present, yield an appropriate ``ifabsent`` value + * If neither, yield an empty dict + + Unlike nwb_schema_language, when ``value`` is set, we yield both a ``equals_*`` constraint + and an ``ifabsent`` constraint, because an ``equals_*`` can be declared without a default + in order to validate that a value is correctly set as the constrained value, and fail + if a value isn't provided. 
+ """ + ret = {} + if cls.value: + if cls.dtype in integer_types: + ret["equals_number"] = cls.value + ret["ifabsent"] = f"integer({cls.value})" + elif cls.dtype in float_types: + ret["equals_number"] = cls.value + ret["ifabsent"] = f"float({cls.value})" + elif cls.dtype in string_types: + ret["equals_string"] = cls.value + ret["ifabsent"] = f"string({cls.value})" + else: + ret["equals_string"] = cls.value + ret["ifabsent"] = cls.value + + elif cls.default_value: + if cls.dtype in string_types: + ret["ifabsent"] = f"string({cls.default_value})" + elif cls.dtype in integer_types: + ret["ifabsent"] = f"int({cls.default_value})" + elif cls.dtype in float_types: + ret["ifabsent"] = f"float({cls.default_value})" + else: + ret["ifabsent"] = cls.default_value + + return ret diff --git a/nwb_linkml/src/nwb_linkml/adapters/attribute.py b/nwb_linkml/src/nwb_linkml/adapters/attribute.py index 7ae2ea1..8326a51 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/attribute.py +++ b/nwb_linkml/src/nwb_linkml/adapters/attribute.py @@ -7,26 +7,13 @@ from typing import ClassVar, Optional, Type, TypedDict from linkml_runtime.linkml_model.meta import SlotDefinition -from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d +from nwb_linkml.adapters.adapter import Adapter, BuildResult, defaults, is_1d from nwb_linkml.adapters.array import ArrayAdapter from nwb_linkml.maps import Map from nwb_linkml.maps.dtype import handle_dtype, inlined from nwb_schema_language import Attribute -def _make_ifabsent(val: str | int | float | None) -> str | None: - if val is None: - return None - elif isinstance(val, str): - return f"string({val})" - elif isinstance(val, int): - return f"integer({val})" - elif isinstance(val, float): - return f"float({val})" - else: - return str(val) - - class AttrDefaults(TypedDict): """Default fields for an attribute""" @@ -38,31 +25,6 @@ class AttrDefaults(TypedDict): class AttributeMap(Map): """Base class for attribute mapping transformations :)""" - 
@classmethod - def handle_defaults(cls, attr: Attribute) -> AttrDefaults: - """ - Construct arguments for linkml slot default metaslots from nwb schema lang attribute props - """ - equals_string = None - equals_number = None - default_value = None - if attr.value: - if isinstance(attr.value, (int, float)): - equals_number = attr.value - elif attr.value: - equals_string = str(attr.value) - - if equals_number: - default_value = _make_ifabsent(equals_number) - elif equals_string: - default_value = _make_ifabsent(equals_string) - elif attr.default_value: - default_value = _make_ifabsent(attr.default_value) - - return AttrDefaults( - equals_string=equals_string, equals_number=equals_number, ifabsent=default_value - ) - @classmethod @abstractmethod def check(cls, attr: Attribute) -> bool: @@ -105,7 +67,7 @@ class MapScalar(AttributeMap): description=attr.doc, required=attr.required, inlined=inlined(attr.dtype), - **cls.handle_defaults(attr), + **defaults(attr), ) return BuildResult(slots=[slot]) @@ -154,7 +116,7 @@ class MapArray(AttributeMap): required=attr.required, inlined=inlined(attr.dtype), **expressions, - **cls.handle_defaults(attr), + **defaults(attr), ) return BuildResult(slots=[slot]) diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 39d4450..7b391de 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -7,7 +7,7 @@ from typing import ClassVar, Optional, Type from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition -from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound +from nwb_linkml.adapters.adapter import BuildResult, defaults, has_attrs, is_1d, is_compound from nwb_linkml.adapters.array import ArrayAdapter from nwb_linkml.adapters.classes import ClassAdapter from nwb_linkml.maps import QUANTITY_MAP, Map @@ -108,6 +108,7 @@ class MapScalar(DatasetMap): description=cls.doc, 
range=handle_dtype(cls.dtype), **QUANTITY_MAP[cls.quantity], + **defaults(cls), ) res = BuildResult(slots=[this_slot]) return res @@ -208,7 +209,19 @@ class MapScalarAttributes(DatasetMap): """ Map to a scalar attribute with an adjoining "value" slot """ - value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True) + # the *value slot* within the generated class is always required, + # but the slot in the parent class referring to this one will indicate whether the whole + # thing is optional or not. You can't provide the attributes of the optional dataset + # without providing its value + quantity = QUANTITY_MAP[cls.quantity].copy() + quantity["required"] = True + + value_slot = SlotDefinition( + name="value", + range=handle_dtype(cls.dtype), + **quantity, + **defaults(cls), + ) res.classes[0].attributes["value"] = value_slot return res diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index afbb82d..326da7c 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -266,11 +266,7 @@ class NamespacesAdapter(Adapter): in_schema = False if isinstance(cls, str) and cls in [ c.neurodata_type_def for c in schema.created_classes - ]: - in_schema = True - elif isinstance(cls, Dataset) and cls in schema.datasets: - in_schema = True - elif isinstance(cls, Group) and cls in schema.groups: + ] or isinstance(cls, Dataset) and cls in schema.datasets or isinstance(cls, Group) and cls in schema.groups: in_schema = True if in_schema: @@ -397,16 +393,16 @@ def roll_down_nwb_class( Merge an ancestor (via ``neurodata_type_inc`` ) source class with a child ``target`` class. 
- On the first recurive pass, only those values that are set on the target are copied from the + On the first recursive pass, only those values that are set on the target are copied from the source class - this isn't a true merging, what we are after is to recursively merge all the values that are modified in the child class with those of the parent class below the top level, the top-level attributes will be carried through via normal inheritance. Rather than re-instantiating the child class, we return the dictionary so that this function can be used in series to merge a whole ancestry chain within - :class:`.NamespacesAdapter` , but this isn't exposed in the function since - class definitions can be spread out over many schemas, and we need the orchestration - of the adapter to have them in all cases we'd be using this. + :class:`.NamespacesAdapter` , but merging isn't exposed in the function since + ancestor class definitions can be spread out over many schemas, + and we need the orchestration of the adapter to have them in all cases we'd be using this. 
Args: source (dict): source dictionary @@ -420,9 +416,9 @@ def roll_down_nwb_class( """ if isinstance(source, (Group, Dataset)): - source = source.model_dump(exclude_unset=True, exclude_none=True) + source = source.model_dump(exclude_none=True) if isinstance(target, (Group, Dataset)): - target = target.model_dump(exclude_unset=True, exclude_none=True) + target = target.model_dump(exclude_none=True) exclude = ("neurodata_type_def",) diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 2497a65..6d944dd 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -66,6 +66,26 @@ flat_to_np = { "isodatetime": np.datetime64, } +integer_types = { + "long", + "int64", + "int", + "int32", + "int16", + "short", + "int8", + "uint", + "uint32", + "uint16", + "uint8", + "uint64", +} + +float_types = {"float", "float32", "double", "float64", "numeric"} + +string_types = {"text", "utf", "utf8", "utf_8", "ascii"} + + np_to_python = { Any: Any, np.number: float, diff --git a/nwb_linkml/src/nwb_linkml/maps/quantity.py b/nwb_linkml/src/nwb_linkml/maps/quantity.py index 8980076..7ae870a 100644 --- a/nwb_linkml/src/nwb_linkml/maps/quantity.py +++ b/nwb_linkml/src/nwb_linkml/maps/quantity.py @@ -9,10 +9,16 @@ We will handle cardinality of array dimensions elsewhere """ QUANTITY_MAP = { - "*": {"required": False, "multivalued": True}, + "*": {"required": None, "multivalued": True}, "+": {"required": True, "multivalued": True}, - "?": {"required": False, "multivalued": False}, - 1: {"required": True, "multivalued": False}, + "?": {"required": None, "multivalued": None}, + 1: {"required": True, "multivalued": None}, # include the NoneType for indexing None: {"required": None, "multivalued": None}, } +""" +Map between NWB quantity values and linkml quantity metaslot values. 
+ +Use ``None`` for defaults (required: False, multivalued: False) rather than ``False`` +to avoid adding unnecessary attributes +""" From 03ba6568a3de3570e7c68f3abce3e9dc9984e3bd Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 19 Sep 2024 19:21:03 -0700 Subject: [PATCH 05/18] lint --- .../src/nwb_linkml/adapters/namespaces.py | 13 +++++--- .../src/nwb_schema_language/util.py | 30 ++++++++++++------- 2 files changed, 29 insertions(+), 14 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 326da7c..70e6f89 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -224,7 +224,7 @@ class NamespacesAdapter(Adapter): return parents - def _overwrite_class(self, new_cls: Dataset | Group, old_cls: Dataset | Group): + def _overwrite_class(self, new_cls: Dataset | Group, old_cls: Dataset | Group) -> None: """ Overwrite the version of a dataset or group that is stored in our schemas """ @@ -264,9 +264,14 @@ class NamespacesAdapter(Adapter): matches = [] for schema in self.all_schemas(): in_schema = False - if isinstance(cls, str) and cls in [ - c.neurodata_type_def for c in schema.created_classes - ] or isinstance(cls, Dataset) and cls in schema.datasets or isinstance(cls, Group) and cls in schema.groups: + if ( + isinstance(cls, str) + and cls in [c.neurodata_type_def for c in schema.created_classes] + or isinstance(cls, Dataset) + and cls in schema.datasets + or isinstance(cls, Group) + and cls in schema.groups + ): in_schema = True if in_schema: diff --git a/nwb_schema_language/src/nwb_schema_language/util.py b/nwb_schema_language/src/nwb_schema_language/util.py index 61bc5ed..ef910a8 100644 --- a/nwb_schema_language/src/nwb_schema_language/util.py +++ b/nwb_schema_language/src/nwb_schema_language/util.py @@ -1,27 +1,37 @@ -from pprint import pformat as _pformat -import textwrap +""" +The fabled junk drawer +""" + import 
re +import textwrap +from pprint import pformat as _pformat def pformat(fields: dict, cls_name: str, indent: str = " ") -> str: """ - pretty format the fields of the items of a ``YAMLRoot`` object without the wonky indentation of pformat. - see ``YAMLRoot.__repr__``. + pretty format the fields of the items of a ``YAMLRoot`` object without the + wonky indentation of pformat. - formatting is similar to black - items at similar levels of nesting have similar levels of indentation, - rather than getting placed at essentially random levels of indentation depending on what came before them. + formatting is similar to black - + items at similar levels of nesting have similar levels of indentation, + rather than getting placed at essentially random levels of indentation + depending on what came before them. """ res = [] total_len = 0 for key, val in fields.items(): if val == [] or val == {} or val is None: continue - # pformat handles everything else that isn't a YAMLRoot object, but it sure does look ugly - # use it to split lines and as the thing of last resort, but otherwise indent = 0, we'll do that + # pformat handles everything else that isn't a YAMLRoot object, + # but it sure does look ugly + # use it to split lines and as the thing of last resort, + # but otherwise indent = 0, we'll do that val_str = _pformat(val, indent=0, compact=True, sort_dicts=False) - # now we indent everything except the first line by indenting and then using regex to remove just the first indent + # now we indent everything except the first line by indenting + # and then using regex to remove just the first indent val_str = re.sub(rf"\A{re.escape(indent)}", "", textwrap.indent(val_str, indent)) - # now recombine with the key in a format that can be re-eval'd into an object if indent is just whitespace + # now recombine with the key in a format that can be re-eval'd + # into an object if indent is just whitespace val_str = f"'{key}': " + val_str # count the total length of this string so we 
know if we need to linebreak or not later From e06c8ad656bb736ffdf1f59e6d3e9ac9c7aec01d Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 19 Sep 2024 19:22:33 -0700 Subject: [PATCH 06/18] model update --- .../pydantic/core/v2_2_0/core_nwb_base.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_device.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_file.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_image.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_ophys.py | 2 +- .../core/v2_2_0/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_2_0/namespace.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_base.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_device.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_file.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_image.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 2 +- .../core/v2_2_1/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_2_1/namespace.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_base.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_device.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_file.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_image.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_misc.py | 2 +- 
.../pydantic/core/v2_2_2/core_nwb_ogen.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 2 +- .../core/v2_2_2/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_2_2/namespace.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_base.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_device.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_file.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_image.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 2 +- .../core/v2_2_4/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_2_4/namespace.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_base.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_device.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_file.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_image.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 2 +- .../core/v2_2_5/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_2_5/namespace.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_base.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_3_0/core_nwb_device.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 112 +++- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_file.py | 9 +- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 152 ++++-- .../pydantic/core/v2_3_0/core_nwb_image.py | 153 +++++- .../pydantic/core/v2_3_0/core_nwb_misc.py | 284 ++++++++-- 
.../pydantic/core/v2_3_0/core_nwb_ogen.py | 44 +- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 69 ++- .../core/v2_3_0/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_3_0/namespace.py | 19 +- .../pydantic/core/v2_4_0/core_nwb_base.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_4_0/core_nwb_device.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 112 +++- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_file.py | 9 +- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 158 ++++-- .../pydantic/core/v2_4_0/core_nwb_image.py | 143 ++++- .../pydantic/core/v2_4_0/core_nwb_misc.py | 284 ++++++++-- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 44 +- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 64 ++- .../core/v2_4_0/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_4_0/namespace.py | 19 +- .../pydantic/core/v2_5_0/core_nwb_base.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 20 +- .../pydantic/core/v2_5_0/core_nwb_device.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 120 ++++- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_file.py | 9 +- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 178 +++++-- .../pydantic/core/v2_5_0/core_nwb_image.py | 153 +++++- .../pydantic/core/v2_5_0/core_nwb_misc.py | 300 +++++++++-- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 48 +- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 68 ++- .../core/v2_5_0/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_5_0/namespace.py | 19 +- .../core/v2_6_0_alpha/core_nwb_base.py | 2 +- .../core/v2_6_0_alpha/core_nwb_behavior.py | 20 +- .../core/v2_6_0_alpha/core_nwb_device.py | 2 +- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 120 ++++- .../core/v2_6_0_alpha/core_nwb_epoch.py | 2 +- .../core/v2_6_0_alpha/core_nwb_file.py | 9 +- .../core/v2_6_0_alpha/core_nwb_icephys.py | 178 +++++-- .../core/v2_6_0_alpha/core_nwb_image.py | 153 +++++- 
.../core/v2_6_0_alpha/core_nwb_misc.py | 300 +++++++++-- .../core/v2_6_0_alpha/core_nwb_ogen.py | 48 +- .../core/v2_6_0_alpha/core_nwb_ophys.py | 95 +++- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 2 +- .../pydantic/core/v2_6_0_alpha/namespace.py | 19 +- .../pydantic/core/v2_7_0/core_nwb_base.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 20 +- .../pydantic/core/v2_7_0/core_nwb_device.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 120 ++++- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_file.py | 9 +- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 178 +++++-- .../pydantic/core/v2_7_0/core_nwb_image.py | 153 +++++- .../pydantic/core/v2_7_0/core_nwb_misc.py | 300 +++++++++-- .../pydantic/core/v2_7_0/core_nwb_ogen.py | 48 +- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 91 +++- .../core/v2_7_0/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_7_0/namespace.py | 19 +- .../models/pydantic/hdmf_common/__init__.py | 1 - .../hdmf_common/v1_4_0/hdmf_common_table.py | 6 +- .../hdmf_common/v1_5_0/hdmf_common_table.py | 6 +- .../hdmf_common/v1_5_1/hdmf_common_table.py | 6 +- .../hdmf_common/v1_6_0/hdmf_common_table.py | 6 +- .../hdmf_common/v1_7_0/hdmf_common_table.py | 6 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 6 +- .../linkml/core/v2_3_0/core.nwb.base.yaml | 5 - .../linkml/core/v2_3_0/core.nwb.behavior.yaml | 39 +- .../linkml/core/v2_3_0/core.nwb.ecephys.yaml | 223 +++++--- .../linkml/core/v2_3_0/core.nwb.epoch.yaml | 12 - .../linkml/core/v2_3_0/core.nwb.file.yaml | 76 --- .../linkml/core/v2_3_0/core.nwb.icephys.yaml | 348 ++++++++---- .../linkml/core/v2_3_0/core.nwb.image.yaml | 245 +++++++-- .../linkml/core/v2_3_0/core.nwb.misc.yaml | 461 ++++++++++++---- .../linkml/core/v2_3_0/core.nwb.ogen.yaml | 72 ++- .../linkml/core/v2_3_0/core.nwb.ophys.yaml | 147 +++--- .../core/v2_3_0/core.nwb.retinotopy.yaml | 11 - .../linkml/core/v2_4_0/core.nwb.base.yaml | 8 - 
.../linkml/core/v2_4_0/core.nwb.behavior.yaml | 39 +- .../linkml/core/v2_4_0/core.nwb.ecephys.yaml | 223 +++++--- .../linkml/core/v2_4_0/core.nwb.epoch.yaml | 12 - .../linkml/core/v2_4_0/core.nwb.file.yaml | 86 --- .../linkml/core/v2_4_0/core.nwb.icephys.yaml | 375 ++++++++----- .../linkml/core/v2_4_0/core.nwb.image.yaml | 245 +++++++-- .../linkml/core/v2_4_0/core.nwb.misc.yaml | 461 ++++++++++++---- .../linkml/core/v2_4_0/core.nwb.ogen.yaml | 72 ++- .../linkml/core/v2_4_0/core.nwb.ophys.yaml | 147 +++--- .../core/v2_4_0/core.nwb.retinotopy.yaml | 11 - .../linkml/core/v2_5_0/core.nwb.base.yaml | 10 - .../linkml/core/v2_5_0/core.nwb.behavior.yaml | 48 +- .../linkml/core/v2_5_0/core.nwb.ecephys.yaml | 241 ++++++--- .../linkml/core/v2_5_0/core.nwb.epoch.yaml | 6 - .../linkml/core/v2_5_0/core.nwb.file.yaml | 86 --- .../linkml/core/v2_5_0/core.nwb.icephys.yaml | 424 ++++++++++----- .../linkml/core/v2_5_0/core.nwb.image.yaml | 260 +++++++-- .../linkml/core/v2_5_0/core.nwb.misc.yaml | 497 ++++++++++++++---- .../linkml/core/v2_5_0/core.nwb.ogen.yaml | 81 ++- .../linkml/core/v2_5_0/core.nwb.ophys.yaml | 156 +++--- .../core/v2_5_0/core.nwb.retinotopy.yaml | 11 - .../core/v2_6_0_alpha/core.nwb.base.yaml | 10 - .../core/v2_6_0_alpha/core.nwb.behavior.yaml | 48 +- .../core/v2_6_0_alpha/core.nwb.ecephys.yaml | 241 ++++++--- .../core/v2_6_0_alpha/core.nwb.epoch.yaml | 6 - .../core/v2_6_0_alpha/core.nwb.file.yaml | 86 --- .../core/v2_6_0_alpha/core.nwb.icephys.yaml | 424 ++++++++++----- .../core/v2_6_0_alpha/core.nwb.image.yaml | 260 +++++++-- .../core/v2_6_0_alpha/core.nwb.misc.yaml | 497 ++++++++++++++---- .../core/v2_6_0_alpha/core.nwb.ogen.yaml | 81 ++- .../core/v2_6_0_alpha/core.nwb.ophys.yaml | 197 +++---- .../v2_6_0_alpha/core.nwb.retinotopy.yaml | 11 - .../linkml/core/v2_7_0/core.nwb.base.yaml | 10 - .../linkml/core/v2_7_0/core.nwb.behavior.yaml | 48 +- .../linkml/core/v2_7_0/core.nwb.ecephys.yaml | 241 ++++++--- .../linkml/core/v2_7_0/core.nwb.epoch.yaml | 6 - 
.../linkml/core/v2_7_0/core.nwb.file.yaml | 86 --- .../linkml/core/v2_7_0/core.nwb.icephys.yaml | 426 ++++++++++----- .../linkml/core/v2_7_0/core.nwb.image.yaml | 260 +++++++-- .../linkml/core/v2_7_0/core.nwb.misc.yaml | 497 ++++++++++++++---- .../linkml/core/v2_7_0/core.nwb.ogen.yaml | 101 +++- .../linkml/core/v2_7_0/core.nwb.ophys.yaml | 197 +++---- .../core/v2_7_0/core.nwb.retinotopy.yaml | 11 - .../hdmf_common/v1_4_0/hdmf-common.table.yaml | 17 +- .../hdmf_common/v1_5_0/hdmf-common.table.yaml | 17 +- .../hdmf_common/v1_5_1/hdmf-common.table.yaml | 17 +- .../hdmf_common/v1_6_0/hdmf-common.table.yaml | 17 +- .../hdmf_common/v1_7_0/hdmf-common.table.yaml | 17 +- .../hdmf_common/v1_8_0/hdmf-common.table.yaml | 17 +- .../v0_1_0/hdmf-experimental.resources.yaml | 16 - .../v0_2_0/hdmf-experimental.resources.yaml | 17 - .../v0_3_0/hdmf-experimental.resources.yaml | 17 - .../v0_4_0/hdmf-experimental.resources.yaml | 19 - .../v0_5_0/hdmf-experimental.resources.yaml | 19 - 203 files changed, 9981 insertions(+), 3800 deletions(-) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py index 263d389..3a7121f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 5691dab..3cdb697 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -28,7 +28,7 @@ class 
ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py index ab24817..35e469e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 136ec40..50c35df 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 4ab3c01..d9e8670 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py index ae16391..574b467 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 439d5af..c118384 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py index 33784d6..1f4d01f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py index e8a4896..97cc604 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 998dda0..1fec83c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 70db9d7..587c4a8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 17edeec..66760ac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", 
arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py index d4b265d..39065e8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py @@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py index f0f43be..d509055 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py index e96918c..9520fc7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py index 80de9c0..c6c0821 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index 169dd5e..59392d2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py index ed1353e..6b07a73 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py index b5a0b9b..bb40dc6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( 
validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 991c1e8..e55e757 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py index 52c10a5..ee1b247 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py index 19a036f..4ccb41a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 609baf0..c26180e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py index a951c51..27ca8a7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 1c6f4ad..8b286c0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py index 7f2ade1..1000273 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py @@ 
-149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py index 956e37d..1608518 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 271fceb..7a295d0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py index 28aa954..952fd14 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index 9664726..678627d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py index c12a965..4398b2e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py index ec66471..a754163 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 9b7729d..7e8e860 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py 
+++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py index 6e805b1..c6a1bdf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py index d80af52..6ebbb31 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py index debdaf9..d8706fb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", 
arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py index e7b56da..9355878 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index bfa2ad5..4a6bba8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py index 9ba793b..68aa1ca 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py @@ -152,7 +152,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py index 0e81486..79dbd8e 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 42613b4..d8c8111 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py index 1aeeb6c..bd4959b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index d4f5172..d5ca311 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( 
validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 61f894b..7d0e888 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py index 9167a4d..ffc9887 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 8067eb7..944696c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py index 05c1d6e..ec713bb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py index 5ff807c..6fb5183 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 20f6353..959ec53 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py index b91e448..f8b39ac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -45,7 
+45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 362bc59..271df8c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py index 23ec3dd..ba443d7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py @@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py index 86fe03f..14986e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py index f4f5e96..686f581 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py index 5abfc5d..cc5f2a1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 48d2503..78d83f8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 6a8ba5a..69289cd 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py index 59aa79e..2a7b510 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py index ee68bff..4d04eec 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py index f3d0d5f..22bc4dc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, 
validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py index 5faeb05..2730432 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py index 6c81182..7a31e4c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 98c3a53..f7e0674 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 5466646..5594f52 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py index 5d12f36..4f78598 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py @@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py index ad3c5f4..0450a89 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py index 8358db6..fbf64a4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py 
@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -176,6 +176,20 @@ class SpatialSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py index 5c0f451..08d5073 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 2676bd5..6630b23 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -38,7 +38,7 @@ 
class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -156,11 +156,12 @@ class ElectricalSeries(TimeSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) - data: Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], - ] = Field(..., description="""Recorded voltage data.""") + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -173,11 +174,6 @@ class ElectricalSeries(TimeSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -215,6 +211,45 @@ class ElectricalSeries(TimeSeries): ) +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and 'channel_conversion' (if present).""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class SpikeEventSeries(ElectricalSeries): """ Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -225,10 +260,7 @@ class SpikeEventSeries(ElectricalSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], - ] = Field(..., description="""Spike waveforms.""") + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", @@ -238,6 +270,11 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Filtering applied to all channels of the data. 
For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -250,11 +287,6 @@ class SpikeEventSeries(ElectricalSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -287,6 +319,44 @@ class SpikeEventSeries(ElectricalSeries): ) +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Unit of measurement for waveforms, which is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class FeatureExtraction(NWBDataInterface): """ Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
@@ -561,7 +631,9 @@ class Clustering(NWBDataInterface): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ElectricalSeries.model_rebuild() +ElectricalSeriesData.model_rebuild() SpikeEventSeries.model_rebuild() +SpikeEventSeriesData.model_rebuild() FeatureExtraction.model_rebuild() EventDetection.model_rebuild() EventWaveform.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 93ea1ba..96ec1a4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py index d692065..7cb5cb1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -25,7 +25,12 @@ from ...core.v2_3_0.core_nwb_icephys import IntracellularElectrode, SweepTable from ...core.v2_3_0.core_nwb_misc import Units from ...core.v2_3_0.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_3_0.core_nwb_ophys import ImagingPlane -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + DynamicTable, + ElementIdentifiers, + VectorData, + VectorIndex, +) metamodel_version = "None" @@ -36,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", 
arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 1fb2a04..6be9e7e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -224,6 +224,20 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", @@ -243,12 +257,12 @@ class CurrentClampSeries(PatchClampSeries): ) name: str = Field(...) - data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: str = Field( ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) @@ -316,12 +330,28 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IZeroClampSeries(CurrentClampSeries): @@ -476,6 +506,20 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["amperes"] = Field( "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", @@ -483,7 +527,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class VoltageClampSeries(PatchClampSeries): @@ -496,13 +542,13 @@ class VoltageClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field( None, description="""Fast capacitance, in farads.""" ) capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field( None, description="""Slow capacitance, in farads.""" ) + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -574,27 +620,6 @@ class VoltageClampSeries(PatchClampSeries): ) -class VoltageClampSeriesData(ConfiguredBaseModel): - """ - Recorded current. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - unit: Literal["amperes"] = Field( - "amperes", - description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", - json_schema_extra={ - "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} - }, - ) - value: Any = Field(...) - - class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): """ Fast capacitance, in farads. @@ -647,6 +672,43 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): value: float = Field(...) +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["amperes"] = Field( + "amperes", + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): """ Resistance compensation bandwidth, in hertz. @@ -851,12 +913,28 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IntracellularElectrode(NWBContainer): @@ -906,15 +984,6 @@ class SweepTable(DynamicTable): ) name: str = Field(...) 
- sweep_number: VectorData[NDArray[Any, int]] = Field( - ..., - description="""Sweep number of the PatchClampSeries in that row.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, - ) series: VectorData[NDArray[Any, PatchClampSeries]] = Field( ..., description="""The PatchClampSeries with the sweep number in that row.""", @@ -936,6 +1005,15 @@ class SweepTable(DynamicTable): } }, ) + sweep_number: VectorData[NDArray[Any, int]] = Field( + ..., + description="""Sweep number of the PatchClampSeries in that row.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) colnames: List[str] = Field( ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -958,9 +1036,9 @@ IZeroClampSeries.model_rebuild() CurrentClampStimulusSeries.model_rebuild() CurrentClampStimulusSeriesData.model_rebuild() VoltageClampSeries.model_rebuild() -VoltageClampSeriesData.model_rebuild() VoltageClampSeriesCapacitanceFast.model_rebuild() VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesData.model_rebuild() VoltageClampSeriesResistanceCompBandwidth.model_rebuild() VoltageClampSeriesResistanceCompCorrection.model_rebuild() VoltageClampSeriesResistanceCompPrediction.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py index 8758ca8..b080a46 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ 
-198,12 +198,9 @@ class ImageSeries(TimeSeries): ) name: str = Field(...) - data: Optional[ - Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], - ] - ] = Field(None, description="""Binary data representing images across frames.""") + data: Optional[ImageSeriesData] = Field( + None, description="""Binary data representing images across frames.""" + ) dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", @@ -214,8 +211,9 @@ class ImageSeries(TimeSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -263,6 +261,43 @@ class ImageSeries(TimeSeries): ) +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. 
If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] + ] = Field(None) + + class ImageSeriesExternalFile(ConfiguredBaseModel): """ Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. @@ -304,12 +339,9 @@ class ImageMaskSeries(ImageSeries): } }, ) - data: Optional[ - Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], - ] - ] = Field(None, description="""Binary data representing images across frames.""") + data: Optional[ImageSeriesData] = Field( + None, description="""Binary data representing images across frames.""" + ) dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", @@ -320,8 +352,9 @@ class ImageMaskSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -379,6 +412,9 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) + data: OpticalSeriesData = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) @@ -387,10 +423,6 @@ class OpticalSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -405,8 +437,9 @@ class OpticalSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -454,6 +487,43 @@ class OpticalSeries(ImageSeries): ) +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + ] + ] = Field(None) + + class IndexSeries(TimeSeries): """ Stores indices to image frames stored in an ImageSeries. The purpose of the ImageIndexSeries is to allow a static image stack to be stored somewhere, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced ImageSeries, and the timestamps array indicates when that image was displayed. @@ -464,10 +534,8 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) 
- data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Index of the frame in the referenced ImageSeries.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IndexSeriesData = Field( + ..., description="""Index of the frame in the referenced ImageSeries.""" ) indexed_timeseries: Union[ImageSeries, str] = Field( ..., @@ -515,13 +583,50 @@ class IndexSeries(TimeSeries): ) +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the frame in the referenced ImageSeries. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model GrayscaleImage.model_rebuild() RGBImage.model_rebuild() RGBAImage.model_rebuild() ImageSeries.model_rebuild() +ImageSeriesData.model_rebuild() ImageSeriesExternalFile.model_rebuild() ImageMaskSeries.model_rebuild() OpticalSeries.model_rebuild() +OpticalSeriesData.model_rebuild() IndexSeries.model_rebuild() +IndexSeriesData.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py index ac3b366..90ee48e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, @@ -213,6 +213,20 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "see ", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", @@ -236,10 +250,8 @@ class AnnotationSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], str] = Field( - ..., - description="""Annotations made during an experiment.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: AnnotationSeriesData = Field( + ..., description="""Annotations made during an experiment.""" ) description: Optional[str] = Field( "no description", @@ -278,6 +290,43 @@ class AnnotationSeries(TimeSeries): ) +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. 
If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], str]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class IntervalSeries(TimeSeries): """ Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. @@ -288,10 +337,8 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) 
- data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Use values >0 if interval started, <0 if interval ended.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IntervalSeriesData = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) description: Optional[str] = Field( "no description", @@ -330,6 +377,43 @@ class IntervalSeries(TimeSeries): ) +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class DecompositionSeries(TimeSeries): """ Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -417,6 +501,20 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", @@ -504,9 +602,18 @@ class Units(DynamicTable): ) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) - spike_times_index: Optional[Named[VectorIndex]] = Field( + electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( None, - description="""Index into the spike_times dataset.""", + description="""Electrode group that each spike unit came from.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) + electrodes: Optional[Named[DynamicTableRegion]] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -516,12 +623,9 @@ class Units(DynamicTable): } }, ) - spike_times: Optional[UnitsSpikeTimes] = Field( - None, description="""Spike times for each unit.""" - ) - obs_intervals_index: Optional[Named[VectorIndex]] = Field( + electrodes_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into the obs_intervals dataset.""", + description="""Index into electrodes.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -547,9 +651,9 @@ class Units(DynamicTable): }, ) ) - electrodes_index: Optional[Named[VectorIndex]] = Field( + obs_intervals_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into electrodes.""", + description="""Index into the obs_intervals dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -559,9 +663,12 @@ class Units(DynamicTable): } }, ) - electrodes: Optional[Named[DynamicTableRegion]] = Field( + spike_times: Optional[UnitsSpikeTimes] = Field( + None, description="""Spike times for each unit.""" + ) + spike_times_index: Optional[Named[VectorIndex]] = Field( None, - description="""Electrode that each spike unit came from, specified using a 
DynamicTableRegion.""", + description="""Index into the spike_times dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -571,41 +678,15 @@ class Units(DynamicTable): } }, ) - electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( - None, - description="""Electrode group that each spike unit came from.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, + waveform_mean: Optional[UnitsWaveformMean] = Field( + None, description="""Spike waveform mean for each spike unit.""" ) - waveform_mean: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( - Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, - ) + waveform_sd: Optional[UnitsWaveformSd] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[UnitsWaveforms] = Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", ) waveforms_index: Optional[Named[VectorIndex]] = Field( None, @@ -671,14 +752,109 @@ class UnitsSpikeTimes(VectorData): ] = Field(None) +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_mean"] = Field( + "waveform_mean", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_sd"] = Field( + "waveform_sd", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. 
This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveforms"] = Field( + "waveforms", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model AbstractFeatureSeries.model_rebuild() AbstractFeatureSeriesData.model_rebuild() AnnotationSeries.model_rebuild() +AnnotationSeriesData.model_rebuild() IntervalSeries.model_rebuild() +IntervalSeriesData.model_rebuild() DecompositionSeries.model_rebuild() DecompositionSeriesData.model_rebuild() DecompositionSeriesBands.model_rebuild() Units.model_rebuild() UnitsSpikeTimes.model_rebuild() +UnitsWaveformMean.model_rebuild() +UnitsWaveformSd.model_rebuild() +UnitsWaveforms.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py index bf95c5c..e55129b 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -121,10 +121,8 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], float] = Field( - ..., - description="""Applied power for optogenetic stimulus, in watts.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: OptogeneticSeriesData = Field( + ..., description="""Applied power for optogenetic stimulus, in watts.""" ) site: Union[OptogeneticStimulusSite, str] = Field( ..., @@ -172,6 +170,41 @@ class OptogeneticSeries(TimeSeries): ) +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["watts"] = Field( + "watts", + description="""Unit of measurement for data, which is fixed to 'watts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class OptogeneticStimulusSite(NWBContainer): """ A site of optogenetic stimulation. 
@@ -202,4 +235,5 @@ class OptogeneticStimulusSite(NWBContainer): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model OptogeneticSeries.model_rebuild() +OptogeneticSeriesData.model_rebuild() OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 670269a..f6eb103 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -27,7 +27,7 @@ from ...core.v2_3_0.core_nwb_base import ( TimeSeriesSync, ) from ...core.v2_3_0.core_nwb_device import Device -from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile +from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile from ...hdmf_common.v1_5_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -179,12 +179,9 @@ class TwoPhotonSeries(ImageSeries): } }, ) - data: Optional[ - Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], - ] - ] = Field(None, description="""Binary data representing images across frames.""") + data: Optional[ImageSeriesData] = Field( + None, description="""Binary data representing images across frames.""" + ) dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", @@ -195,8 +192,9 @@ class TwoPhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. 
This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -254,9 +252,7 @@ class RoiResponseSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field(..., description="""Signals from ROIs.""") + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", @@ -306,6 +302,42 @@ class RoiResponseSeries(TimeSeries): ) +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class DfOverF(NWBDataInterface): """ dF/F information about a region of interest (ROI). 
Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). @@ -372,6 +404,10 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) pixel_mask_index: Optional[Named[VectorIndex]] = Field( None, description="""Index into pixel_mask.""", @@ -384,9 +420,9 @@ class PlaneSegmentation(DynamicTable): } }, ) - pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( None, - description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) voxel_mask_index: Optional[Named[VectorIndex]] = Field( None, @@ -400,10 +436,6 @@ class PlaneSegmentation(DynamicTable): } }, ) - voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( - None, - description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", - ) reference_images: Optional[Dict[str, ImageSeries]] = Field( None, description="""Image stacks that the segmentation masks apply to.""", @@ -702,6 +734,7 @@ class CorrectedImageStack(NWBDataInterface): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TwoPhotonSeries.model_rebuild() RoiResponseSeries.model_rebuild() +RoiResponseSeriesData.model_rebuild() DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 5c78658..38f2e67 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py index 2125d57..a0842a1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py @@ -38,6 +38,7 @@ from ...core.v2_3_0.core_nwb_ecephys import ( ClusterWaveforms, Clustering, ElectricalSeries, + ElectricalSeriesData, ElectrodeGroup, ElectrodeGroupPosition, EventDetection, @@ -46,6 +47,7 @@ from ...core.v2_3_0.core_nwb_ecephys import ( FilteredEphys, LFP, SpikeEventSeries, + SpikeEventSeriesData, ) from ...core.v2_3_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries from ...core.v2_3_0.core_nwb_file import ( @@ -87,9 +89,12 @@ from ...core.v2_3_0.core_nwb_image import 
( GrayscaleImage, ImageMaskSeries, ImageSeries, + ImageSeriesData, ImageSeriesExternalFile, IndexSeries, + IndexSeriesData, OpticalSeries, + OpticalSeriesData, RGBAImage, RGBImage, ) @@ -97,14 +102,23 @@ from ...core.v2_3_0.core_nwb_misc import ( AbstractFeatureSeries, AbstractFeatureSeriesData, AnnotationSeries, + AnnotationSeriesData, DecompositionSeries, DecompositionSeriesBands, DecompositionSeriesData, IntervalSeries, + IntervalSeriesData, Units, UnitsSpikeTimes, + UnitsWaveformMean, + UnitsWaveformSd, + UnitsWaveforms, +) +from ...core.v2_3_0.core_nwb_ogen import ( + OptogeneticSeries, + OptogeneticSeriesData, + OptogeneticStimulusSite, ) -from ...core.v2_3_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite from ...core.v2_3_0.core_nwb_ophys import ( CorrectedImageStack, DfOverF, @@ -120,6 +134,7 @@ from ...core.v2_3_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, RoiResponseSeries, + RoiResponseSeriesData, TwoPhotonSeries, ) from ...core.v2_3_0.core_nwb_retinotopy import ( @@ -161,7 +176,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py index f8b6d99..0948248 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 7c0abb8..074546e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -176,6 +176,20 @@ class SpatialSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py index 436d2d4..c253e9e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index ac26b29..64c0ab0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -38,7 +38,7 @@ 
class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -156,11 +156,12 @@ class ElectricalSeries(TimeSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) - data: Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], - ] = Field(..., description="""Recorded voltage data.""") + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -173,11 +174,6 @@ class ElectricalSeries(TimeSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -215,6 +211,45 @@ class ElectricalSeries(TimeSeries): ) +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and 'channel_conversion' (if present).""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class SpikeEventSeries(ElectricalSeries): """ Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -225,10 +260,7 @@ class SpikeEventSeries(ElectricalSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], - ] = Field(..., description="""Spike waveforms.""") + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", @@ -238,6 +270,11 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Filtering applied to all channels of the data. 
For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -250,11 +287,6 @@ class SpikeEventSeries(ElectricalSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -287,6 +319,44 @@ class SpikeEventSeries(ElectricalSeries): ) +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Unit of measurement for waveforms, which is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class FeatureExtraction(NWBDataInterface): """ Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
@@ -561,7 +631,9 @@ class Clustering(NWBDataInterface): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ElectricalSeries.model_rebuild() +ElectricalSeriesData.model_rebuild() SpikeEventSeries.model_rebuild() +SpikeEventSeriesData.model_rebuild() FeatureExtraction.model_rebuild() EventDetection.model_rebuild() EventWaveform.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 25894a3..6904077 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py index 84d5b9a..c92a64b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -33,7 +33,12 @@ from ...core.v2_4_0.core_nwb_icephys import ( from ...core.v2_4_0.core_nwb_misc import Units from ...core.v2_4_0.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_4_0.core_nwb_ophys import ImagingPlane -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + DynamicTable, + ElementIdentifiers, + VectorData, + VectorIndex, +) metamodel_version = "None" @@ -44,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py index d4ebcb3..0fd2950 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -227,6 +227,20 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", @@ -246,12 +260,12 @@ class CurrentClampSeries(PatchClampSeries): ) name: str = Field(...) - data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: str = Field( ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) @@ -319,12 +333,28 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IZeroClampSeries(CurrentClampSeries): @@ -479,6 +509,20 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["amperes"] = Field( "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", @@ -486,7 +530,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class VoltageClampSeries(PatchClampSeries): @@ -499,13 +545,13 @@ class VoltageClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field( None, description="""Fast capacitance, in farads.""" ) capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field( None, description="""Slow capacitance, in farads.""" ) + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -577,27 +623,6 @@ class VoltageClampSeries(PatchClampSeries): ) -class VoltageClampSeriesData(ConfiguredBaseModel): - """ - Recorded current. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - unit: Literal["amperes"] = Field( - "amperes", - description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", - json_schema_extra={ - "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} - }, - ) - value: Any = Field(...) - - class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): """ Fast capacitance, in farads. @@ -650,6 +675,43 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): value: float = Field(...) +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["amperes"] = Field( + "amperes", + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): """ Resistance compensation bandwidth, in hertz. @@ -854,12 +916,28 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IntracellularElectrode(NWBContainer): @@ -909,15 +987,6 @@ class SweepTable(DynamicTable): ) name: str = Field(...) 
- sweep_number: VectorData[NDArray[Any, int]] = Field( - ..., - description="""Sweep number of the PatchClampSeries in that row.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, - ) series: VectorData[NDArray[Any, PatchClampSeries]] = Field( ..., description="""The PatchClampSeries with the sweep number in that row.""", @@ -939,6 +1008,15 @@ class SweepTable(DynamicTable): } }, ) + sweep_number: VectorData[NDArray[Any, int]] = Field( + ..., + description="""Sweep number of the PatchClampSeries in that row.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) colnames: List[str] = Field( ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -1120,12 +1198,12 @@ class IntracellularRecordingsTable(AlignedDynamicTable): electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" ) - stimuli: IntracellularStimuliTable = Field( - ..., description="""Table for storing intracellular stimulus related metadata.""" - ) responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) + stimuli: IntracellularStimuliTable = Field( + ..., description="""Table for storing intracellular stimulus related metadata.""" + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) @@ -1465,9 +1543,9 @@ IZeroClampSeries.model_rebuild() CurrentClampStimulusSeries.model_rebuild() CurrentClampStimulusSeriesData.model_rebuild() VoltageClampSeries.model_rebuild() -VoltageClampSeriesData.model_rebuild() VoltageClampSeriesCapacitanceFast.model_rebuild() VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesData.model_rebuild() 
VoltageClampSeriesResistanceCompBandwidth.model_rebuild() VoltageClampSeriesResistanceCompCorrection.model_rebuild() VoltageClampSeriesResistanceCompPrediction.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py index 8fd3288..10a1d92 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -198,9 +198,7 @@ class ImageSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -214,8 +212,9 @@ class ImageSeries(TimeSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -263,6 +262,43 @@ class ImageSeries(TimeSeries): ) +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] + ] = Field(None) + + class ImageSeriesExternalFile(ConfiguredBaseModel): """ Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. @@ -304,9 +340,7 @@ class ImageMaskSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -320,8 +354,9 @@ class ImageMaskSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). 
This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -379,6 +414,9 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) + data: OpticalSeriesData = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) @@ -387,10 +425,6 @@ class OpticalSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -405,8 +439,9 @@ class OpticalSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. 
If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -454,6 +489,43 @@ class OpticalSeries(ImageSeries): ) +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + ] + ] = Field(None) + + class IndexSeries(TimeSeries): """ Stores indices to image frames stored in an ImageSeries. The purpose of the ImageIndexSeries is to allow a static image stack to be stored somewhere, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced ImageSeries, and the timestamps array indicates when that image was displayed. @@ -464,10 +536,8 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) 
- data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Index of the frame in the referenced ImageSeries.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IndexSeriesData = Field( + ..., description="""Index of the frame in the referenced ImageSeries.""" ) indexed_timeseries: Union[ImageSeries, str] = Field( ..., @@ -515,13 +585,50 @@ class IndexSeries(TimeSeries): ) +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the frame in the referenced ImageSeries. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model GrayscaleImage.model_rebuild() RGBImage.model_rebuild() RGBAImage.model_rebuild() ImageSeries.model_rebuild() +ImageSeriesData.model_rebuild() ImageSeriesExternalFile.model_rebuild() ImageMaskSeries.model_rebuild() OpticalSeries.model_rebuild() +OpticalSeriesData.model_rebuild() IndexSeries.model_rebuild() +IndexSeriesData.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py index 3ab6b75..3ccb411 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, @@ -213,6 +213,20 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "see ", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", @@ -236,10 +250,8 @@ class AnnotationSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], str] = Field( - ..., - description="""Annotations made during an experiment.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: AnnotationSeriesData = Field( + ..., description="""Annotations made during an experiment.""" ) description: Optional[str] = Field( "no description", @@ -278,6 +290,43 @@ class AnnotationSeries(TimeSeries): ) +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. 
If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], str]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class IntervalSeries(TimeSeries): """ Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. @@ -288,10 +337,8 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) 
- data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Use values >0 if interval started, <0 if interval ended.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IntervalSeriesData = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) description: Optional[str] = Field( "no description", @@ -330,6 +377,43 @@ class IntervalSeries(TimeSeries): ) +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class DecompositionSeries(TimeSeries): """ Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -417,6 +501,20 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", @@ -504,9 +602,18 @@ class Units(DynamicTable): ) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) - spike_times_index: Optional[Named[VectorIndex]] = Field( + electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( None, - description="""Index into the spike_times dataset.""", + description="""Electrode group that each spike unit came from.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) + electrodes: Optional[Named[DynamicTableRegion]] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -516,12 +623,9 @@ class Units(DynamicTable): } }, ) - spike_times: Optional[UnitsSpikeTimes] = Field( - None, description="""Spike times for each unit.""" - ) - obs_intervals_index: Optional[Named[VectorIndex]] = Field( + electrodes_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into the obs_intervals dataset.""", + description="""Index into electrodes.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -547,9 +651,9 @@ class Units(DynamicTable): }, ) ) - electrodes_index: Optional[Named[VectorIndex]] = Field( + obs_intervals_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into electrodes.""", + description="""Index into the obs_intervals dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -559,9 +663,12 @@ class Units(DynamicTable): } }, ) - electrodes: Optional[Named[DynamicTableRegion]] = Field( + spike_times: Optional[UnitsSpikeTimes] = Field( + None, description="""Spike times for each unit.""" + ) + spike_times_index: Optional[Named[VectorIndex]] = Field( None, - description="""Electrode that each spike unit came from, specified using a 
DynamicTableRegion.""", + description="""Index into the spike_times dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -571,41 +678,15 @@ class Units(DynamicTable): } }, ) - electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( - None, - description="""Electrode group that each spike unit came from.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, + waveform_mean: Optional[UnitsWaveformMean] = Field( + None, description="""Spike waveform mean for each spike unit.""" ) - waveform_mean: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( - Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, - ) + waveform_sd: Optional[UnitsWaveformSd] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[UnitsWaveforms] = Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", ) waveforms_index: Optional[Named[VectorIndex]] = Field( None, @@ -671,14 +752,109 @@ class UnitsSpikeTimes(VectorData): ] = Field(None) +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_mean"] = Field( + "waveform_mean", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_sd"] = Field( + "waveform_sd", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. 
This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveforms"] = Field( + "waveforms", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model AbstractFeatureSeries.model_rebuild() AbstractFeatureSeriesData.model_rebuild() AnnotationSeries.model_rebuild() +AnnotationSeriesData.model_rebuild() IntervalSeries.model_rebuild() +IntervalSeriesData.model_rebuild() DecompositionSeries.model_rebuild() DecompositionSeriesData.model_rebuild() DecompositionSeriesBands.model_rebuild() Units.model_rebuild() UnitsSpikeTimes.model_rebuild() +UnitsWaveformMean.model_rebuild() +UnitsWaveformSd.model_rebuild() +UnitsWaveforms.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 350a398..bddf0ea 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -121,10 +121,8 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], float] = Field( - ..., - description="""Applied power for optogenetic stimulus, in watts.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: OptogeneticSeriesData = Field( + ..., description="""Applied power for optogenetic stimulus, in watts.""" ) site: Union[OptogeneticStimulusSite, str] = Field( ..., @@ -172,6 +170,41 @@ class OptogeneticSeries(TimeSeries): ) +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["watts"] = Field( + "watts", + description="""Unit of measurement for data, which is fixed to 'watts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class OptogeneticStimulusSite(NWBContainer): """ A site of optogenetic stimulation. 
@@ -202,4 +235,5 @@ class OptogeneticStimulusSite(NWBContainer): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model OptogeneticSeries.model_rebuild() +OptogeneticSeriesData.model_rebuild() OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 9f6e191..73f8fcd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -27,7 +27,7 @@ from ...core.v2_4_0.core_nwb_base import ( TimeSeriesSync, ) from ...core.v2_4_0.core_nwb_device import Device -from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile +from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile from ...hdmf_common.v1_5_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -179,9 +179,7 @@ class TwoPhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -195,8 +193,9 @@ class TwoPhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). 
This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -254,9 +253,7 @@ class RoiResponseSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field(..., description="""Signals from ROIs.""") + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", @@ -306,6 +303,42 @@ class RoiResponseSeries(TimeSeries): ) +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class DfOverF(NWBDataInterface): """ dF/F information about a region of interest (ROI). 
Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). @@ -372,6 +405,10 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) pixel_mask_index: Optional[Named[VectorIndex]] = Field( None, description="""Index into pixel_mask.""", @@ -384,9 +421,9 @@ class PlaneSegmentation(DynamicTable): } }, ) - pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( None, - description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) voxel_mask_index: Optional[Named[VectorIndex]] = Field( None, @@ -400,10 +437,6 @@ class PlaneSegmentation(DynamicTable): } }, ) - voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( - None, - description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", - ) reference_images: Optional[Dict[str, ImageSeries]] = Field( None, description="""Image stacks that the segmentation masks apply to.""", @@ -702,6 +735,7 @@ class CorrectedImageStack(NWBDataInterface): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TwoPhotonSeries.model_rebuild() RoiResponseSeries.model_rebuild() +RoiResponseSeriesData.model_rebuild() DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index ffc194e..60f8b4c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py index 620dcf2..775c660 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py @@ -39,6 +39,7 @@ from ...core.v2_4_0.core_nwb_ecephys import ( ClusterWaveforms, Clustering, ElectricalSeries, + ElectricalSeriesData, ElectrodeGroup, ElectrodeGroupPosition, EventDetection, @@ -47,6 +48,7 @@ from ...core.v2_4_0.core_nwb_ecephys import ( FilteredEphys, LFP, SpikeEventSeries, + SpikeEventSeriesData, ) from ...core.v2_4_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries from ...core.v2_4_0.core_nwb_file import ( @@ -100,9 +102,12 @@ from ...core.v2_4_0.core_nwb_image 
import ( GrayscaleImage, ImageMaskSeries, ImageSeries, + ImageSeriesData, ImageSeriesExternalFile, IndexSeries, + IndexSeriesData, OpticalSeries, + OpticalSeriesData, RGBAImage, RGBImage, ) @@ -110,14 +115,23 @@ from ...core.v2_4_0.core_nwb_misc import ( AbstractFeatureSeries, AbstractFeatureSeriesData, AnnotationSeries, + AnnotationSeriesData, DecompositionSeries, DecompositionSeriesBands, DecompositionSeriesData, IntervalSeries, + IntervalSeriesData, Units, UnitsSpikeTimes, + UnitsWaveformMean, + UnitsWaveformSd, + UnitsWaveforms, +) +from ...core.v2_4_0.core_nwb_ogen import ( + OptogeneticSeries, + OptogeneticSeriesData, + OptogeneticStimulusSite, ) -from ...core.v2_4_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite from ...core.v2_4_0.core_nwb_ophys import ( CorrectedImageStack, DfOverF, @@ -133,6 +147,7 @@ from ...core.v2_4_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, RoiResponseSeries, + RoiResponseSeriesData, TwoPhotonSeries, ) from ...core.v2_4_0.core_nwb_retinotopy import ( @@ -174,7 +189,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py index 2db9763..7dcc142 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 89c1038..cfa3239 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -176,6 +176,24 @@ class SpatialSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py index e4cf279..b52ccd6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 91c2222..0fa24fc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -156,11 +156,12 @@ class ElectricalSeries(TimeSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) - data: Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], - ] = Field(..., description="""Recorded voltage data.""") + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -173,11 +174,6 @@ class ElectricalSeries(TimeSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -215,6 +211,49 @@ class ElectricalSeries(TimeSeries): ) +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class SpikeEventSeries(ElectricalSeries): """ Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). 
This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -225,10 +264,7 @@ class SpikeEventSeries(ElectricalSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], - ] = Field(..., description="""Spike waveforms.""") + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", @@ -238,6 +274,11 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -250,11 +291,6 @@ class SpikeEventSeries(ElectricalSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -287,6 +323,48 @@ class SpikeEventSeries(ElectricalSeries): ) +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Unit of measurement for waveforms, which is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class FeatureExtraction(NWBDataInterface): """ Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
@@ -561,7 +639,9 @@ class Clustering(NWBDataInterface): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ElectricalSeries.model_rebuild() +ElectricalSeriesData.model_rebuild() SpikeEventSeries.model_rebuild() +SpikeEventSeriesData.model_rebuild() FeatureExtraction.model_rebuild() EventDetection.model_rebuild() EventWaveform.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py index ab92eb7..94de21c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py index 6c056a6..d0e41d7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -34,7 +34,12 @@ from ...core.v2_5_0.core_nwb_icephys import ( from ...core.v2_5_0.core_nwb_misc import Units from ...core.v2_5_0.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_5_0.core_nwb_ophys import ImagingPlane -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + DynamicTable, + ElementIdentifiers, + VectorData, + VectorIndex, +) metamodel_version = "None" @@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py index b500a82..baaa066 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -227,6 +227,24 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -246,12 +264,12 @@ class CurrentClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: str = Field( ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) @@ -319,12 +337,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) 
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IZeroClampSeries(CurrentClampSeries): @@ -479,6 +517,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["amperes"] = Field( "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -486,7 +542,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class VoltageClampSeries(PatchClampSeries): @@ -499,13 +557,13 @@ class VoltageClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field( None, description="""Fast capacitance, in farads.""" ) capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field( None, description="""Slow capacitance, in farads.""" ) + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -577,27 +635,6 @@ class VoltageClampSeries(PatchClampSeries): ) -class VoltageClampSeriesData(ConfiguredBaseModel): - """ - Recorded current. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - unit: Literal["amperes"] = Field( - "amperes", - description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", - json_schema_extra={ - "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} - }, - ) - value: Any = Field(...) - - class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): """ Fast capacitance, in farads. @@ -650,6 +687,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): value: float = Field(...) +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["amperes"] = Field( + "amperes", + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): """ Resistance compensation bandwidth, in hertz. @@ -854,12 +932,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IntracellularElectrode(NWBContainer): @@ -910,15 +1008,6 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: VectorData[NDArray[Any, int]] = Field( - ..., - description="""Sweep number of the PatchClampSeries in that row.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, - ) series: VectorData[NDArray[Any, PatchClampSeries]] = Field( ..., description="""The PatchClampSeries with the sweep number in that row.""", @@ -940,6 +1029,15 @@ class SweepTable(DynamicTable): } }, ) + sweep_number: VectorData[NDArray[Any, int]] = Field( + ..., + description="""Sweep number of the PatchClampSeries in that row.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -1121,12 +1219,12 @@ class IntracellularRecordingsTable(AlignedDynamicTable): electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" ) - stimuli: IntracellularStimuliTable = Field( - ..., description="""Table for storing intracellular stimulus related metadata.""" - ) responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) + stimuli: IntracellularStimuliTable = Field( + ..., description="""Table for storing intracellular stimulus related metadata.""" + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) @@ -1466,9 +1564,9 @@ IZeroClampSeries.model_rebuild() CurrentClampStimulusSeries.model_rebuild() CurrentClampStimulusSeriesData.model_rebuild() VoltageClampSeries.model_rebuild() -VoltageClampSeriesData.model_rebuild() VoltageClampSeriesCapacitanceFast.model_rebuild() VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesData.model_rebuild() VoltageClampSeriesResistanceCompBandwidth.model_rebuild() VoltageClampSeriesResistanceCompCorrection.model_rebuild() VoltageClampSeriesResistanceCompPrediction.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py index 520d249..4b9edee 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -204,9 +204,7 @@ class ImageSeries(TimeSeries): ) name: str = Field(...) 
- data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -220,8 +218,9 @@ class ImageSeries(TimeSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -269,6 +268,47 @@ class ImageSeries(TimeSeries): ) +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] + ] = Field(None) + + class ImageSeriesExternalFile(ConfiguredBaseModel): """ Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. @@ -310,9 +350,7 @@ class ImageMaskSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -326,8 +364,9 @@ class ImageMaskSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -385,6 +424,9 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) + data: OpticalSeriesData = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) @@ -393,10 +435,6 @@ class OpticalSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -411,8 +449,9 @@ class OpticalSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -460,6 +499,47 @@ class OpticalSeries(ImageSeries): ) +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + ] + ] = Field(None) + + class IndexSeries(TimeSeries): """ Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). 
The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. @@ -470,10 +550,8 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Index of the image (using zero-indexing) in the linked Images object.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IndexSeriesData = Field( + ..., description="""Index of the image (using zero-indexing) in the linked Images object.""" ) indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( None, @@ -530,13 +608,52 @@ class IndexSeries(TimeSeries): ) +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field( + -1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["N/A"] = Field( + "N/A", + description="""This field is unused by IndexSeries and has the value N/A.""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model GrayscaleImage.model_rebuild() RGBImage.model_rebuild() RGBAImage.model_rebuild() ImageSeries.model_rebuild() +ImageSeriesData.model_rebuild() ImageSeriesExternalFile.model_rebuild() ImageMaskSeries.model_rebuild() OpticalSeries.model_rebuild() +OpticalSeriesData.model_rebuild() IndexSeries.model_rebuild() +IndexSeriesData.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py index 7901288..5646cd8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -213,6 +213,24 @@ class 
AbstractFeatureSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "see ", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", @@ -236,10 +254,8 @@ class AnnotationSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], str] = Field( - ..., - description="""Annotations made during an experiment.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: AnnotationSeriesData = Field( + ..., description="""Annotations made during an experiment.""" ) description: Optional[str] = Field( "no description", @@ -278,6 +294,47 @@ class AnnotationSeries(TimeSeries): ) +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. 
An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. 
Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], str]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class IntervalSeries(TimeSeries): """ Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. @@ -288,10 +345,8 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Use values >0 if interval started, <0 if interval ended.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IntervalSeriesData = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) description: Optional[str] = Field( "no description", @@ -330,6 +385,47 @@ class IntervalSeries(TimeSeries): ) +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. 
Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class DecompositionSeries(TimeSeries): """ Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -417,6 +513,24 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", @@ -504,9 +618,18 @@ class Units(DynamicTable): ) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) - spike_times_index: Optional[Named[VectorIndex]] = Field( + electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( None, - description="""Index into the spike_times dataset.""", + description="""Electrode group that each spike unit came from.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) + electrodes: Optional[Named[DynamicTableRegion]] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -516,12 +639,9 @@ class Units(DynamicTable): } }, ) - spike_times: Optional[UnitsSpikeTimes] = Field( - None, description="""Spike times for each unit.""" - ) - obs_intervals_index: Optional[Named[VectorIndex]] = Field( + electrodes_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into the obs_intervals dataset.""", + description="""Index into electrodes.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -547,9 +667,9 @@ class Units(DynamicTable): }, ) ) - electrodes_index: Optional[Named[VectorIndex]] = Field( + obs_intervals_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into electrodes.""", + description="""Index into the obs_intervals dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -559,9 +679,12 @@ class Units(DynamicTable): } }, ) - electrodes: Optional[Named[DynamicTableRegion]] = Field( + spike_times: Optional[UnitsSpikeTimes] = Field( + None, description="""Spike times for each unit.""" + ) + spike_times_index: Optional[Named[VectorIndex]] = Field( None, - description="""Electrode that each spike unit came from, specified using a 
DynamicTableRegion.""", + description="""Index into the spike_times dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -571,41 +694,15 @@ class Units(DynamicTable): } }, ) - electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( - None, - description="""Electrode group that each spike unit came from.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, + waveform_mean: Optional[UnitsWaveformMean] = Field( + None, description="""Spike waveform mean for each spike unit.""" ) - waveform_mean: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( - Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, - ) + waveform_sd: Optional[UnitsWaveformSd] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[UnitsWaveforms] = Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", ) waveforms_index: Optional[Named[VectorIndex]] = Field( None, @@ -671,14 +768,109 @@ class UnitsSpikeTimes(VectorData): ] = Field(None) +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_mean"] = Field( + "waveform_mean", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_sd"] = Field( + "waveform_sd", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. 
This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveforms"] = Field( + "waveforms", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model AbstractFeatureSeries.model_rebuild() AbstractFeatureSeriesData.model_rebuild() AnnotationSeries.model_rebuild() +AnnotationSeriesData.model_rebuild() IntervalSeries.model_rebuild() +IntervalSeriesData.model_rebuild() DecompositionSeries.model_rebuild() DecompositionSeriesData.model_rebuild() DecompositionSeriesBands.model_rebuild() Units.model_rebuild() UnitsSpikeTimes.model_rebuild() +UnitsWaveformMean.model_rebuild() +UnitsWaveformSd.model_rebuild() +UnitsWaveforms.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py index e39e6b2..29938d6 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -121,10 +121,8 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], float] = Field( - ..., - description="""Applied power for optogenetic stimulus, in watts.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: OptogeneticSeriesData = Field( + ..., description="""Applied power for optogenetic stimulus, in watts.""" ) site: Union[OptogeneticStimulusSite, str] = Field( ..., @@ -172,6 +170,45 @@ class OptogeneticSeries(TimeSeries): ) +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["watts"] = Field( + "watts", + description="""Unit of measurement for data, which is fixed to 'watts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class OptogeneticStimulusSite(NWBContainer): """ A site of optogenetic stimulation. @@ -202,4 +239,5 @@ class OptogeneticStimulusSite(NWBContainer): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model OptogeneticSeries.model_rebuild() +OptogeneticSeriesData.model_rebuild() OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 6107f4c..ba37576 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -27,7 +27,7 @@ from ...core.v2_5_0.core_nwb_base import ( TimeSeriesSync, ) from ...core.v2_5_0.core_nwb_device import Device -from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile +from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile from ...hdmf_common.v1_5_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -179,9 +179,7 @@ class TwoPhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + 
data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -195,8 +193,9 @@ class TwoPhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -254,9 +253,7 @@ class RoiResponseSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field(..., description="""Signals from ROIs.""") + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", @@ -306,6 +303,46 @@ class RoiResponseSeries(TimeSeries): ) +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class DfOverF(NWBDataInterface): """ dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). @@ -372,6 +409,10 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. 
Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) pixel_mask_index: Optional[Named[VectorIndex]] = Field( None, description="""Index into pixel_mask.""", @@ -384,9 +425,9 @@ class PlaneSegmentation(DynamicTable): } }, ) - pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( None, - description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) voxel_mask_index: Optional[Named[VectorIndex]] = Field( None, @@ -400,10 +441,6 @@ class PlaneSegmentation(DynamicTable): } }, ) - voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( - None, - description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", - ) reference_images: Optional[Dict[str, ImageSeries]] = Field( None, description="""Image stacks that the segmentation masks apply to.""", @@ -702,6 +739,7 @@ class CorrectedImageStack(NWBDataInterface): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TwoPhotonSeries.model_rebuild() RoiResponseSeries.model_rebuild() +RoiResponseSeriesData.model_rebuild() DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index b72f7b4..82f1e30 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py index 7aaa8a2..ff571fa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py @@ -40,6 +40,7 @@ from ...core.v2_5_0.core_nwb_ecephys import ( ClusterWaveforms, Clustering, ElectricalSeries, + ElectricalSeriesData, ElectrodeGroup, ElectrodeGroupPosition, EventDetection, @@ -48,6 +49,7 @@ from ...core.v2_5_0.core_nwb_ecephys import ( FilteredEphys, LFP, SpikeEventSeries, + SpikeEventSeriesData, ) from ...core.v2_5_0.core_nwb_epoch import TimeIntervals from ...core.v2_5_0.core_nwb_file import ( @@ -101,9 +103,12 @@ from ...core.v2_5_0.core_nwb_image import ( GrayscaleImage, 
ImageMaskSeries, ImageSeries, + ImageSeriesData, ImageSeriesExternalFile, IndexSeries, + IndexSeriesData, OpticalSeries, + OpticalSeriesData, RGBAImage, RGBImage, ) @@ -111,14 +116,23 @@ from ...core.v2_5_0.core_nwb_misc import ( AbstractFeatureSeries, AbstractFeatureSeriesData, AnnotationSeries, + AnnotationSeriesData, DecompositionSeries, DecompositionSeriesBands, DecompositionSeriesData, IntervalSeries, + IntervalSeriesData, Units, UnitsSpikeTimes, + UnitsWaveformMean, + UnitsWaveformSd, + UnitsWaveforms, +) +from ...core.v2_5_0.core_nwb_ogen import ( + OptogeneticSeries, + OptogeneticSeriesData, + OptogeneticStimulusSite, ) -from ...core.v2_5_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite from ...core.v2_5_0.core_nwb_ophys import ( CorrectedImageStack, DfOverF, @@ -134,6 +148,7 @@ from ...core.v2_5_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, RoiResponseSeries, + RoiResponseSeriesData, TwoPhotonSeries, ) from ...core.v2_5_0.core_nwb_retinotopy import ( @@ -175,7 +190,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 4837ae7..1881de9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index 7e4ad59..9a19768 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -176,6 +176,24 @@ class SpatialSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index c57186b..2792e91 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 0529035..0e59233 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -156,11 +156,12 @@ class ElectricalSeries(TimeSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) - data: Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], - ] = Field(..., description="""Recorded voltage data.""") + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -173,11 +174,6 @@ class ElectricalSeries(TimeSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -215,6 +211,49 @@ class ElectricalSeries(TimeSeries): ) +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class SpikeEventSeries(ElectricalSeries): """ Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). 
This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -225,10 +264,7 @@ class SpikeEventSeries(ElectricalSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], - ] = Field(..., description="""Spike waveforms.""") + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", @@ -238,6 +274,11 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -250,11 +291,6 @@ class SpikeEventSeries(ElectricalSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -287,6 +323,48 @@ class SpikeEventSeries(ElectricalSeries): ) +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Unit of measurement for waveforms, which is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class FeatureExtraction(NWBDataInterface): """ Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
@@ -561,7 +639,9 @@ class Clustering(NWBDataInterface): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ElectricalSeries.model_rebuild() +ElectricalSeriesData.model_rebuild() SpikeEventSeries.model_rebuild() +SpikeEventSeriesData.model_rebuild() FeatureExtraction.model_rebuild() EventDetection.model_rebuild() EventWaveform.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index d3fa53b..fca71ae 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 975e51c..aaea52c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -34,7 +34,12 @@ from ...core.v2_6_0_alpha.core_nwb_icephys import ( from ...core.v2_6_0_alpha.core_nwb_misc import Units from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_6_0_alpha.core_nwb_ophys import ImagingPlane -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + DynamicTable, + ElementIdentifiers, + VectorData, + VectorIndex, +) metamodel_version = "None" @@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - 
extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 1f9c04b..17e121f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -227,6 +227,24 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -246,12 +264,12 @@ class CurrentClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: str = Field( ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) @@ -319,12 +337,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) 
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IZeroClampSeries(CurrentClampSeries): @@ -479,6 +517,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["amperes"] = Field( "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -486,7 +542,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class VoltageClampSeries(PatchClampSeries): @@ -499,13 +557,13 @@ class VoltageClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field( None, description="""Fast capacitance, in farads.""" ) capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field( None, description="""Slow capacitance, in farads.""" ) + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -577,27 +635,6 @@ class VoltageClampSeries(PatchClampSeries): ) -class VoltageClampSeriesData(ConfiguredBaseModel): - """ - Recorded current. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - unit: Literal["amperes"] = Field( - "amperes", - description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", - json_schema_extra={ - "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} - }, - ) - value: Any = Field(...) - - class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): """ Fast capacitance, in farads. @@ -650,6 +687,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): value: float = Field(...) +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["amperes"] = Field( + "amperes", + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): """ Resistance compensation bandwidth, in hertz. @@ -854,12 +932,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IntracellularElectrode(NWBContainer): @@ -910,15 +1008,6 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: VectorData[NDArray[Any, int]] = Field( - ..., - description="""Sweep number of the PatchClampSeries in that row.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, - ) series: VectorData[NDArray[Any, PatchClampSeries]] = Field( ..., description="""The PatchClampSeries with the sweep number in that row.""", @@ -940,6 +1029,15 @@ class SweepTable(DynamicTable): } }, ) + sweep_number: VectorData[NDArray[Any, int]] = Field( + ..., + description="""Sweep number of the PatchClampSeries in that row.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -1121,12 +1219,12 @@ class IntracellularRecordingsTable(AlignedDynamicTable): electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" ) - stimuli: IntracellularStimuliTable = Field( - ..., description="""Table for storing intracellular stimulus related metadata.""" - ) responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) + stimuli: IntracellularStimuliTable = Field( + ..., description="""Table for storing intracellular stimulus related metadata.""" + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) @@ -1466,9 +1564,9 @@ IZeroClampSeries.model_rebuild() CurrentClampStimulusSeries.model_rebuild() CurrentClampStimulusSeriesData.model_rebuild() VoltageClampSeries.model_rebuild() -VoltageClampSeriesData.model_rebuild() VoltageClampSeriesCapacitanceFast.model_rebuild() VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesData.model_rebuild() VoltageClampSeriesResistanceCompBandwidth.model_rebuild() VoltageClampSeriesResistanceCompCorrection.model_rebuild() VoltageClampSeriesResistanceCompPrediction.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index af69abe..e8d4430 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -204,9 +204,7 @@ class ImageSeries(TimeSeries): ) 
name: str = Field(...) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -220,8 +218,9 @@ class ImageSeries(TimeSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -269,6 +268,47 @@ class ImageSeries(TimeSeries): ) +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] + ] = Field(None) + + class ImageSeriesExternalFile(ConfiguredBaseModel): """ Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. @@ -310,9 +350,7 @@ class ImageMaskSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -326,8 +364,9 @@ class ImageMaskSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -385,6 +424,9 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) + data: OpticalSeriesData = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) @@ -393,10 +435,6 @@ class OpticalSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -411,8 +449,9 @@ class OpticalSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -460,6 +499,47 @@ class OpticalSeries(ImageSeries): ) +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + ] + ] = Field(None) + + class IndexSeries(TimeSeries): """ Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). 
The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. @@ -470,10 +550,8 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Index of the image (using zero-indexing) in the linked Images object.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IndexSeriesData = Field( + ..., description="""Index of the image (using zero-indexing) in the linked Images object.""" ) indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( None, @@ -530,13 +608,52 @@ class IndexSeries(TimeSeries): ) +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field( + -1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["N/A"] = Field( + "N/A", + description="""This field is unused by IndexSeries and has the value N/A.""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model GrayscaleImage.model_rebuild() RGBImage.model_rebuild() RGBAImage.model_rebuild() ImageSeries.model_rebuild() +ImageSeriesData.model_rebuild() ImageSeriesExternalFile.model_rebuild() ImageMaskSeries.model_rebuild() OpticalSeries.model_rebuild() +OpticalSeriesData.model_rebuild() IndexSeries.model_rebuild() +IndexSeriesData.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 5c28736..4278604 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -213,6 +213,24 @@ 
class AbstractFeatureSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "see ", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", @@ -236,10 +254,8 @@ class AnnotationSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], str] = Field( - ..., - description="""Annotations made during an experiment.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: AnnotationSeriesData = Field( + ..., description="""Annotations made during an experiment.""" ) description: Optional[str] = Field( "no description", @@ -278,6 +294,47 @@ class AnnotationSeries(TimeSeries): ) +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. 
An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. 
Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], str]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class IntervalSeries(TimeSeries): """ Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. @@ -288,10 +345,8 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Use values >0 if interval started, <0 if interval ended.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IntervalSeriesData = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) description: Optional[str] = Field( "no description", @@ -330,6 +385,47 @@ class IntervalSeries(TimeSeries): ) +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. 
Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class DecompositionSeries(TimeSeries): """ Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -417,6 +513,24 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", @@ -504,9 +618,18 @@ class Units(DynamicTable): ) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) - spike_times_index: Optional[Named[VectorIndex]] = Field( + electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( None, - description="""Index into the spike_times dataset.""", + description="""Electrode group that each spike unit came from.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) + electrodes: Optional[Named[DynamicTableRegion]] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -516,12 +639,9 @@ class Units(DynamicTable): } }, ) - spike_times: Optional[UnitsSpikeTimes] = Field( - None, description="""Spike times for each unit in seconds.""" - ) - obs_intervals_index: Optional[Named[VectorIndex]] = Field( + electrodes_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into the obs_intervals dataset.""", + description="""Index into electrodes.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -547,9 +667,9 @@ class Units(DynamicTable): }, ) ) - electrodes_index: Optional[Named[VectorIndex]] = Field( + obs_intervals_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into electrodes.""", + description="""Index into the obs_intervals dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -559,9 +679,12 @@ class Units(DynamicTable): } }, ) - electrodes: Optional[Named[DynamicTableRegion]] = Field( + spike_times: Optional[UnitsSpikeTimes] = Field( + None, description="""Spike times for each unit in seconds.""" + ) + spike_times_index: Optional[Named[VectorIndex]] = Field( None, - description="""Electrode that each spike unit came from, specified 
using a DynamicTableRegion.""", + description="""Index into the spike_times dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -571,41 +694,15 @@ class Units(DynamicTable): } }, ) - electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( - None, - description="""Electrode group that each spike unit came from.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, + waveform_mean: Optional[UnitsWaveformMean] = Field( + None, description="""Spike waveform mean for each spike unit.""" ) - waveform_mean: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( - Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, - ) + waveform_sd: Optional[UnitsWaveformSd] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[UnitsWaveforms] = Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", ) waveforms_index: Optional[Named[VectorIndex]] = Field( None, @@ -671,14 +768,109 @@ class UnitsSpikeTimes(VectorData): ] = Field(None) +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_mean"] = Field( + "waveform_mean", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_sd"] = Field( + "waveform_sd", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. 
This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveforms"] = Field( + "waveforms", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model AbstractFeatureSeries.model_rebuild() AbstractFeatureSeriesData.model_rebuild() AnnotationSeries.model_rebuild() +AnnotationSeriesData.model_rebuild() IntervalSeries.model_rebuild() +IntervalSeriesData.model_rebuild() DecompositionSeries.model_rebuild() DecompositionSeriesData.model_rebuild() DecompositionSeriesBands.model_rebuild() Units.model_rebuild() UnitsSpikeTimes.model_rebuild() +UnitsWaveformMean.model_rebuild() +UnitsWaveformSd.model_rebuild() +UnitsWaveforms.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 42fe82f..d94f420 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -121,10 +121,8 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], float] = Field( - ..., - description="""Applied power for optogenetic stimulus, in watts.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: OptogeneticSeriesData = Field( + ..., description="""Applied power for optogenetic stimulus, in watts.""" ) site: Union[OptogeneticStimulusSite, str] = Field( ..., @@ -172,6 +170,45 @@ class OptogeneticSeries(TimeSeries): ) +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["watts"] = Field( + "watts", + description="""Unit of measurement for data, which is fixed to 'watts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class OptogeneticStimulusSite(NWBContainer): """ A site of optogenetic stimulation. @@ -202,4 +239,5 @@ class OptogeneticStimulusSite(NWBContainer): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model OptogeneticSeries.model_rebuild() +OptogeneticSeriesData.model_rebuild() OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index f6acd6c..d9ba753 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -27,7 +27,11 @@ from ...core.v2_6_0_alpha.core_nwb_base import ( TimeSeriesSync, ) from ...core.v2_6_0_alpha.core_nwb_device import Device -from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile +from ...core.v2_6_0_alpha.core_nwb_image import ( + ImageSeries, + ImageSeriesData, + ImageSeriesExternalFile, +) from ...hdmf_common.v1_5_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, @@ -45,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -160,21 +164,21 @@ class OnePhotonSeries(ImageSeries): ) name: str = Field(...) 
- pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[float] = Field( - None, - description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + binning: Optional[int] = Field( + None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" ) exposure_time: Optional[float] = Field( None, description="""Exposure time of the sample; often the inverse of the frequency.""" ) - binning: Optional[int] = Field( - None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" - ) - power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + scan_line_rate: Optional[float] = Field( + None, + description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + ) imaging_plane: Union[ImagingPlane, str] = Field( ..., json_schema_extra={ @@ -184,9 +188,7 @@ class OnePhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -200,8 +202,9 @@ class OnePhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. 
This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -278,9 +281,7 @@ class TwoPhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -294,8 +295,9 @@ class TwoPhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -353,9 +355,7 @@ class RoiResponseSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field(..., description="""Signals from ROIs.""") + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", @@ -405,6 +405,46 @@ class RoiResponseSeries(TimeSeries): ) +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class DfOverF(NWBDataInterface): """ dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). 
@@ -471,6 +511,10 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) pixel_mask_index: Optional[Named[VectorIndex]] = Field( None, description="""Index into pixel_mask.""", @@ -483,9 +527,9 @@ class PlaneSegmentation(DynamicTable): } }, ) - pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( None, - description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) voxel_mask_index: Optional[Named[VectorIndex]] = Field( None, @@ -499,10 +543,6 @@ class PlaneSegmentation(DynamicTable): } }, ) - voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( - None, - description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", - ) reference_images: Optional[Dict[str, ImageSeries]] = Field( None, description="""Image stacks that the segmentation masks apply to.""", @@ -802,6 +842,7 @@ class CorrectedImageStack(NWBDataInterface): OnePhotonSeries.model_rebuild() TwoPhotonSeries.model_rebuild() RoiResponseSeries.model_rebuild() +RoiResponseSeriesData.model_rebuild() DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index 3a085f7..f0ac913 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py index 21b7046..df0db47 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -40,6 +40,7 @@ from ...core.v2_6_0_alpha.core_nwb_ecephys import ( ClusterWaveforms, Clustering, ElectricalSeries, + ElectricalSeriesData, ElectrodeGroup, ElectrodeGroupPosition, EventDetection, @@ -48,6 +49,7 @@ from ...core.v2_6_0_alpha.core_nwb_ecephys import ( FilteredEphys, LFP, SpikeEventSeries, + SpikeEventSeriesData, ) from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals from ...core.v2_6_0_alpha.core_nwb_file import ( @@ -102,9 +104,12 @@ from 
...core.v2_6_0_alpha.core_nwb_image import ( GrayscaleImage, ImageMaskSeries, ImageSeries, + ImageSeriesData, ImageSeriesExternalFile, IndexSeries, + IndexSeriesData, OpticalSeries, + OpticalSeriesData, RGBAImage, RGBImage, ) @@ -112,14 +117,23 @@ from ...core.v2_6_0_alpha.core_nwb_misc import ( AbstractFeatureSeries, AbstractFeatureSeriesData, AnnotationSeries, + AnnotationSeriesData, DecompositionSeries, DecompositionSeriesBands, DecompositionSeriesData, IntervalSeries, + IntervalSeriesData, Units, UnitsSpikeTimes, + UnitsWaveformMean, + UnitsWaveformSd, + UnitsWaveforms, +) +from ...core.v2_6_0_alpha.core_nwb_ogen import ( + OptogeneticSeries, + OptogeneticSeriesData, + OptogeneticStimulusSite, ) -from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite from ...core.v2_6_0_alpha.core_nwb_ophys import ( CorrectedImageStack, DfOverF, @@ -136,6 +150,7 @@ from ...core.v2_6_0_alpha.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, RoiResponseSeries, + RoiResponseSeriesData, TwoPhotonSeries, ) from ...core.v2_6_0_alpha.core_nwb_retinotopy import ( @@ -177,7 +192,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py index a645a2f..5a67fdf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py index 836c2e2..a67672a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -176,6 +176,24 @@ class SpatialSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py index 59b53c8..8a24ab1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index dc96a98..195633b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -156,11 +156,12 @@ class ElectricalSeries(TimeSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) - data: Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], - ] = Field(..., description="""Recorded voltage data.""") + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) + data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -173,11 +174,6 @@ class ElectricalSeries(TimeSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -215,6 +211,49 @@ class ElectricalSeries(TimeSeries): ) +class ElectricalSeriesData(ConfiguredBaseModel): + """ + Recorded voltage data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class SpikeEventSeries(ElectricalSeries): """ Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). 
This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -225,10 +264,7 @@ class SpikeEventSeries(ElectricalSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], - ] = Field(..., description="""Spike waveforms.""") + data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""") timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", @@ -238,6 +274,11 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, + ) electrodes: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", @@ -250,11 +291,6 @@ class SpikeEventSeries(ElectricalSeries): } }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( - None, - description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, - ) description: Optional[str] = Field( "no description", description="""Description of the time series.""", @@ -287,6 +323,48 @@ class SpikeEventSeries(ElectricalSeries): ) +class SpikeEventSeriesData(ConfiguredBaseModel): + """ + Spike waveforms. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["volts"] = Field( + "volts", + description="""Unit of measurement for waveforms, which is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + ] + ] = Field(None) + + class FeatureExtraction(NWBDataInterface): """ Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
@@ -561,7 +639,9 @@ class Clustering(NWBDataInterface): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ElectricalSeries.model_rebuild() +ElectricalSeriesData.model_rebuild() SpikeEventSeries.model_rebuild() +SpikeEventSeriesData.model_rebuild() FeatureExtraction.model_rebuild() EventDetection.model_rebuild() EventWaveform.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py index e8b5539..eb514b2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py index 038a4ae..b21e028 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -34,7 +34,12 @@ from ...core.v2_7_0.core_nwb_icephys import ( from ...core.v2_7_0.core_nwb_misc import Units from ...core.v2_7_0.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_7_0.core_nwb_ophys import ImagingPlane -from ...hdmf_common.v1_8_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + DynamicTable, + ElementIdentifiers, + VectorData, + VectorIndex, +) metamodel_version = "None" @@ -45,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py index c1818b4..3cb28da 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -227,6 +227,24 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -246,12 +264,12 @@ class CurrentClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) + data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: str = Field( ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) @@ -319,12 +337,32 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) 
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IZeroClampSeries(CurrentClampSeries): @@ -479,6 +517,24 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["amperes"] = Field( "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", @@ -486,7 +542,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class VoltageClampSeries(PatchClampSeries): @@ -499,13 +557,13 @@ class VoltageClampSeries(PatchClampSeries): ) name: str = Field(...) 
- data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field( None, description="""Fast capacitance, in farads.""" ) capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field( None, description="""Slow capacitance, in farads.""" ) + data: VoltageClampSeriesData = Field(..., description="""Recorded current.""") resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -577,27 +635,6 @@ class VoltageClampSeries(PatchClampSeries): ) -class VoltageClampSeriesData(ConfiguredBaseModel): - """ - Recorded current. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - unit: Literal["amperes"] = Field( - "amperes", - description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", - json_schema_extra={ - "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} - }, - ) - value: Any = Field(...) - - class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): """ Fast capacitance, in farads. @@ -650,6 +687,47 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): value: float = Field(...) +class VoltageClampSeriesData(ConfiguredBaseModel): + """ + Recorded current. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["amperes"] = Field( + "amperes", + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, + ) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): """ Resistance compensation bandwidth, in hertz. @@ -854,12 +932,32 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Literal["volts"] = Field( "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Any = Field(...) + value: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) class IntracellularElectrode(NWBContainer): @@ -910,15 +1008,6 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: VectorData[NDArray[Any, int]] = Field( - ..., - description="""Sweep number of the PatchClampSeries in that row.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, - ) series: VectorData[NDArray[Any, PatchClampSeries]] = Field( ..., description="""The PatchClampSeries with the sweep number in that row.""", @@ -940,6 +1029,15 @@ class SweepTable(DynamicTable): } }, ) + sweep_number: VectorData[NDArray[Any, int]] = Field( + ..., + description="""Sweep number of the PatchClampSeries in that row.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -1133,12 +1231,12 @@ class IntracellularRecordingsTable(AlignedDynamicTable): electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" ) - stimuli: IntracellularStimuliTable = Field( - ..., description="""Table for storing intracellular stimulus related metadata.""" - ) responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) + stimuli: IntracellularStimuliTable = Field( + ..., description="""Table for storing intracellular stimulus related metadata.""" + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) @@ -1478,9 +1576,9 @@ IZeroClampSeries.model_rebuild() CurrentClampStimulusSeries.model_rebuild() CurrentClampStimulusSeriesData.model_rebuild() VoltageClampSeries.model_rebuild() -VoltageClampSeriesData.model_rebuild() VoltageClampSeriesCapacitanceFast.model_rebuild() VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesData.model_rebuild() VoltageClampSeriesResistanceCompBandwidth.model_rebuild() VoltageClampSeriesResistanceCompCorrection.model_rebuild() VoltageClampSeriesResistanceCompPrediction.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py index 6e97172..263bf5b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -204,9 +204,7 @@ class ImageSeries(TimeSeries): ) name: str = Field(...) 
- data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -220,8 +218,9 @@ class ImageSeries(TimeSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -269,6 +268,47 @@ class ImageSeries(TimeSeries): ) +class ImageSeriesData(ConfiguredBaseModel): + """ + Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. 
This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] + ] = Field(None) + + class ImageSeriesExternalFile(ConfiguredBaseModel): """ Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. @@ -310,9 +350,7 @@ class ImageMaskSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -326,8 +364,9 @@ class ImageMaskSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -385,6 +424,9 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) + data: OpticalSeriesData = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) @@ -393,10 +435,6 @@ class OpticalSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -411,8 +449,9 @@ class OpticalSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -460,6 +499,47 @@ class OpticalSeries(ImageSeries): ) +class OpticalSeriesData(ConfiguredBaseModel): + """ + Images presented to subject, either grayscale or RGB + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + ] + ] = Field(None) + + class IndexSeries(TimeSeries): """ Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). 
The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. @@ -470,10 +550,8 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Index of the image (using zero-indexing) in the linked Images object.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IndexSeriesData = Field( + ..., description="""Index of the image (using zero-indexing) in the linked Images object.""" ) indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( None, @@ -530,13 +608,52 @@ class IndexSeries(TimeSeries): ) +class IndexSeriesData(ConfiguredBaseModel): + """ + Index of the image (using zero-indexing) in the linked Images object. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field(None, description="""This field is unused by IndexSeries.""") + resolution: Optional[float] = Field( + -1.0, + description="""This field is unused by IndexSeries.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["N/A"] = Field( + "N/A", + description="""This field is unused by IndexSeries and has the value N/A.""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model GrayscaleImage.model_rebuild() RGBImage.model_rebuild() RGBAImage.model_rebuild() ImageSeries.model_rebuild() +ImageSeriesData.model_rebuild() ImageSeriesExternalFile.model_rebuild() ImageMaskSeries.model_rebuild() OpticalSeries.model_rebuild() +OpticalSeriesData.model_rebuild() IndexSeries.model_rebuild() +IndexSeriesData.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py index 1eb2c3a..e1dbe0e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -213,6 +213,24 @@ class 
AbstractFeatureSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: Optional[str] = Field( "see ", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", @@ -236,10 +254,8 @@ class AnnotationSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], str] = Field( - ..., - description="""Annotations made during an experiment.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: AnnotationSeriesData = Field( + ..., description="""Annotations made during an experiment.""" ) description: Optional[str] = Field( "no description", @@ -278,6 +294,47 @@ class AnnotationSeries(TimeSeries): ) +class AnnotationSeriesData(ConfiguredBaseModel): + """ + Annotations made during an experiment. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. 
An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. 
Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], str]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class IntervalSeries(TimeSeries): """ Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. @@ -288,10 +345,8 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], int] = Field( - ..., - description="""Use values >0 if interval started, <0 if interval ended.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, + data: IntervalSeriesData = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) description: Optional[str] = Field( "no description", @@ -330,6 +385,47 @@ class IntervalSeries(TimeSeries): ) +class IntervalSeriesData(ConfiguredBaseModel): + """ + Use values >0 if interval started, <0 if interval ended. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". 
For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: float = Field( + -1.0, + description="""Smallest meaningful difference between values in data. 
Annotations have no units, so the value is fixed to -1.0.""", + le=-1, + ge=-1, + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["n/a"] = Field( + "n/a", + description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""", + json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}}, + ) + value: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} + ) + + class DecompositionSeries(TimeSeries): """ Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -417,6 +513,24 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) unit: str = Field( "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", @@ -504,9 +618,18 @@ class Units(DynamicTable): ) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) - spike_times_index: Optional[Named[VectorIndex]] = Field( + electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( None, - description="""Index into the spike_times dataset.""", + description="""Electrode group that each spike unit came from.""", + json_schema_extra={ + "linkml_meta": { + "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} + } + }, + ) + electrodes: Optional[Named[DynamicTableRegion]] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -516,12 +639,9 @@ class Units(DynamicTable): } }, ) - spike_times: Optional[UnitsSpikeTimes] = Field( - None, description="""Spike times for each unit in seconds.""" - ) - obs_intervals_index: Optional[Named[VectorIndex]] = Field( + electrodes_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into the obs_intervals dataset.""", + description="""Index into electrodes.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -547,9 +667,9 @@ class Units(DynamicTable): }, ) ) - electrodes_index: Optional[Named[VectorIndex]] = Field( + obs_intervals_index: Optional[Named[VectorIndex]] = Field( None, - description="""Index into electrodes.""", + description="""Index into the obs_intervals dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -559,9 +679,12 @@ class Units(DynamicTable): } }, ) - electrodes: Optional[Named[DynamicTableRegion]] = Field( + spike_times: Optional[UnitsSpikeTimes] = Field( + None, description="""Spike times for each unit in seconds.""" + ) + spike_times_index: Optional[Named[VectorIndex]] = Field( None, - description="""Electrode that each spike unit came from, specified 
using a DynamicTableRegion.""", + description="""Index into the spike_times dataset.""", json_schema_extra={ "linkml_meta": { "annotations": { @@ -571,41 +694,15 @@ class Units(DynamicTable): } }, ) - electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field( - None, - description="""Electrode group that each spike unit came from.""", - json_schema_extra={ - "linkml_meta": { - "array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1} - } - }, + waveform_mean: Optional[UnitsWaveformMean] = Field( + None, description="""Spike waveform mean for each spike unit.""" ) - waveform_mean: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - VectorData[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], - ] - ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( - Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, - ) + waveform_sd: Optional[UnitsWaveformSd] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[UnitsWaveforms] = Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", ) waveforms_index: Optional[Named[VectorIndex]] = Field( None, @@ -671,14 +768,109 @@ class UnitsSpikeTimes(VectorData): ] = Field(None) +class UnitsWaveformMean(VectorData): + """ + Spike waveform mean for each spike unit. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_mean"] = Field( + "waveform_mean", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformSd(VectorData): + """ + Spike waveform standard deviation for each spike unit. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveform_sd"] = Field( + "waveform_sd", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. 
This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveforms(VectorData): + """ + Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"}) + + name: Literal["waveforms"] = Field( + "waveforms", + json_schema_extra={ + "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"} + }, + ) + sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""") + unit: Optional[Literal["volts"]] = Field( + "volts", + description="""Unit of measurement. This value is fixed to 'volts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model AbstractFeatureSeries.model_rebuild() AbstractFeatureSeriesData.model_rebuild() AnnotationSeries.model_rebuild() +AnnotationSeriesData.model_rebuild() IntervalSeries.model_rebuild() +IntervalSeriesData.model_rebuild() DecompositionSeries.model_rebuild() DecompositionSeriesData.model_rebuild() DecompositionSeriesBands.model_rebuild() Units.model_rebuild() UnitsSpikeTimes.model_rebuild() +UnitsWaveformMean.model_rebuild() +UnitsWaveformSd.model_rebuild() +UnitsWaveforms.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py index 626a28c..bc21582 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -121,9 +121,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field( + data: OptogeneticSeriesData = Field( ..., description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""", ) @@ -173,6 +171,47 @@ class OptogeneticSeries(TimeSeries): ) +class OptogeneticSeriesData(ConfiguredBaseModel): + """ + Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. 
If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: Literal["watts"] = Field( + "watts", + description="""Unit of measurement for data, which is fixed to 'watts'.""", + json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class OptogeneticStimulusSite(NWBContainer): """ A site of optogenetic stimulation. @@ -203,4 +242,5 @@ class OptogeneticStimulusSite(NWBContainer): # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model OptogeneticSeries.model_rebuild() +OptogeneticSeriesData.model_rebuild() OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py index d462064..8104916 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -27,7 +27,7 @@ from ...core.v2_7_0.core_nwb_base import ( TimeSeriesSync, ) from ...core.v2_7_0.core_nwb_device import Device -from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile +from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesData, ImageSeriesExternalFile from ...hdmf_common.v1_8_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -160,21 +160,21 @@ class OnePhotonSeries(ImageSeries): ) name: str = Field(...) 
- pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[float] = Field( - None, - description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + binning: Optional[int] = Field( + None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" ) exposure_time: Optional[float] = Field( None, description="""Exposure time of the sample; often the inverse of the frequency.""" ) - binning: Optional[int] = Field( - None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" - ) - power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + scan_line_rate: Optional[float] = Field( + None, + description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + ) imaging_plane: Union[ImagingPlane, str] = Field( ..., json_schema_extra={ @@ -184,9 +184,7 @@ class OnePhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -200,8 +198,9 @@ class OnePhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. 
This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -278,9 +277,7 @@ class TwoPhotonSeries(ImageSeries): } }, ) - data: Union[ - NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] - ] = Field( + data: ImageSeriesData = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) @@ -294,8 +291,9 @@ class TwoPhotonSeries(ImageSeries): description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", ) format: Optional[str] = Field( - None, + "raw", description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}}, ) device: Optional[Union[Device, str]] = Field( None, @@ -353,9 +351,7 @@ class RoiResponseSeries(TimeSeries): ) name: str = Field(...) - data: Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] - ] = Field(..., description="""Signals from ROIs.""") + data: RoiResponseSeriesData = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", @@ -405,6 +401,46 @@ class RoiResponseSeries(TimeSeries): ) +class RoiResponseSeriesData(ConfiguredBaseModel): + """ + Signals from ROIs. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + conversion: Optional[float] = Field( + 1.0, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + -1.0, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, + ) + unit: str = Field( + ..., + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + ] + ] = Field(None) + + class DfOverF(NWBDataInterface): """ dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). 
@@ -471,6 +507,10 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) + pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) pixel_mask_index: Optional[Named[VectorIndex]] = Field( None, description="""Index into pixel_mask.""", @@ -483,9 +523,9 @@ class PlaneSegmentation(DynamicTable): } }, ) - pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( + voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( None, - description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) voxel_mask_index: Optional[Named[VectorIndex]] = Field( None, @@ -499,10 +539,6 @@ class PlaneSegmentation(DynamicTable): } }, ) - voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( - None, - description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", - ) reference_images: Optional[Dict[str, ImageSeries]] = Field( None, description="""Image stacks that the segmentation masks apply to.""", @@ -802,6 +838,7 @@ class CorrectedImageStack(NWBDataInterface): OnePhotonSeries.model_rebuild() TwoPhotonSeries.model_rebuild() RoiResponseSeries.model_rebuild() +RoiResponseSeriesData.model_rebuild() DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 26f2f92..c7ced82 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py index 5747cde..fd6b259 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py @@ -40,6 +40,7 @@ from ...core.v2_7_0.core_nwb_ecephys import ( ClusterWaveforms, Clustering, ElectricalSeries, + ElectricalSeriesData, ElectrodeGroup, ElectrodeGroupPosition, EventDetection, @@ -48,6 +49,7 @@ from ...core.v2_7_0.core_nwb_ecephys import ( FilteredEphys, LFP, SpikeEventSeries, + SpikeEventSeriesData, ) from ...core.v2_7_0.core_nwb_epoch import TimeIntervals from ...core.v2_7_0.core_nwb_file import ( @@ -102,9 +104,12 @@ from ...core.v2_7_0.core_nwb_image import ( GrayscaleImage, ImageMaskSeries, ImageSeries, + 
ImageSeriesData, ImageSeriesExternalFile, IndexSeries, + IndexSeriesData, OpticalSeries, + OpticalSeriesData, RGBAImage, RGBImage, ) @@ -112,14 +117,23 @@ from ...core.v2_7_0.core_nwb_misc import ( AbstractFeatureSeries, AbstractFeatureSeriesData, AnnotationSeries, + AnnotationSeriesData, DecompositionSeries, DecompositionSeriesBands, DecompositionSeriesData, IntervalSeries, + IntervalSeriesData, Units, UnitsSpikeTimes, + UnitsWaveformMean, + UnitsWaveformSd, + UnitsWaveforms, +) +from ...core.v2_7_0.core_nwb_ogen import ( + OptogeneticSeries, + OptogeneticSeriesData, + OptogeneticStimulusSite, ) -from ...core.v2_7_0.core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite from ...core.v2_7_0.core_nwb_ophys import ( CorrectedImageStack, DfOverF, @@ -136,6 +150,7 @@ from ...core.v2_7_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, RoiResponseSeries, + RoiResponseSeriesData, TwoPhotonSeries, ) from ...core.v2_7_0.core_nwb_retinotopy import ( @@ -178,7 +193,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py index 8b13789..e69de29 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py @@ -1 +0,0 @@ - diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index a730ec1..c704fa9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -928,12 +928,12 @@ class 
DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) - table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 27a287c..c604b38 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -928,12 +928,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) - table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 3112a4f..6719d89 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -928,12 +928,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) 
- table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 0759b51..d47f747 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -928,12 +928,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) - table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index e805fe7..dceaa2e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -928,12 +928,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) 
- table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 8f0d610..d6699e6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -928,12 +928,12 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ) name: str = Field(...) - table: DynamicTable = Field( - ..., description="""Reference to the DynamicTable object that this region applies to.""" - ) description: str = Field( ..., description="""Description of what this table region points to.""" ) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml index b11b02c..04aec2d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml @@ -124,7 +124,6 @@ classes: external file. range: TimeSeries__data required: true - multivalued: false inlined: true starting_time: name: starting_time @@ -132,8 +131,6 @@ classes: uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. 
range: TimeSeries__starting_time - required: false - multivalued: false inlined: true timestamps: name: timestamps @@ -176,8 +173,6 @@ classes: external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. range: TimeSeries__sync - required: false - multivalued: false inlined: true inlined_as_list: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml index f63c218..27f87f4 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml @@ -38,14 +38,11 @@ classes: reference frame. range: SpatialSeries__data required: true - multivalued: false inlined: true reference_frame: name: reference_frame description: Description defining what exactly 'straight-ahead' means. range: text - required: false - multivalued: false tree_root: true SpatialSeries__data: name: SpatialSeries__data @@ -59,6 +56,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. 
+ range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml index 7a93461..8ce3b23 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml @@ -39,40 +39,6 @@ classes: about the filter properties as possible. range: text required: false - data: - name: data - description: Recorded voltage data. 
- range: numeric - required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_channels - - array: - dimensions: - - alias: num_times - - alias: num_channels - - alias: num_samples - electrodes: - name: electrodes - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: DynamicTableRegion pointer to the electrodes that this time series - was generated from. - range: DynamicTableRegion - required: true - multivalued: false - inlined: true channel_conversion: name: channel_conversion description: Channel-specific conversion factor. Multiply the data in the @@ -90,7 +56,100 @@ classes: range: float32 required: false multivalued: false + data: + name: data + description: Recorded voltage data. + range: ElectricalSeries__data + required: true + inlined: true + electrodes: + name: electrodes + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + range: DynamicTableRegion + required: true + inlined: true tree_root: true + ElectricalSeries__data: + name: ElectricalSeries__data + description: Recorded voltage data. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. To access the data in these units, multiply 'data' by 'conversion' + and 'channel_conversion' (if present). 
+ ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples SpikeEventSeries: name: SpikeEventSeries description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold @@ -111,19 +170,9 @@ classes: data: name: data description: Spike waveforms. - range: numeric + range: SpikeEventSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_events - - alias: num_samples - - array: - dimensions: - - alias: num_events - - alias: num_channels - - alias: num_samples + inlined: true timestamps: name: timestamps description: Timestamps for samples stored in data, in seconds, relative to @@ -137,6 +186,73 @@ classes: required: true multivalued: false tree_root: true + SpikeEventSeries__data: + name: SpikeEventSeries__data + description: Spike waveforms. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. 
+ range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples FeatureExtraction: name: FeatureExtraction description: Features, such as PC1 and PC2, that are extracted from signals stored @@ -192,7 +308,6 @@ classes: was generated from. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true EventDetection: @@ -212,7 +327,6 @@ classes: or dV/dT threshold, as well as relevant values. 
range: text required: true - multivalued: false source_idx: name: source_idx description: Indices (zero-based) into source ElectricalSeries::data array @@ -241,7 +355,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ElectricalSeries @@ -323,8 +436,6 @@ classes: name: position description: stereotaxic or common framework coordinates range: ElectrodeGroup__position - required: false - multivalued: false inlined: true device: name: device @@ -333,7 +444,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -356,24 +466,18 @@ classes: array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false y: name: y description: y coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false z: name: z description: z coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ClusterWaveforms: name: ClusterWaveforms description: DEPRECATED The mean waveform shape, including standard deviation, @@ -395,7 +499,6 @@ classes: description: Filtering applied to data before generating mean/sd range: text required: true - multivalued: false waveform_mean: name: waveform_mean description: The mean waveform for each cluster, using the same indices for @@ -427,7 +530,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Clustering @@ -451,7 +553,6 @@ classes: clusters curated using Klusters, etc) range: text required: true - multivalued: false num: name: num description: Cluster number of each event diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml index 4eb778d..c095b7b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml @@ -63,15 +63,11 @@ classes: value: neurodata_type_inc description: Index for tags. range: VectorIndex - required: false - multivalued: false inlined: true timeseries: name: timeseries description: An index into a TimeSeries object. range: TimeIntervals__timeseries - required: false - multivalued: false inlined: true timeseries_index: name: timeseries_index @@ -84,8 +80,6 @@ classes: value: neurodata_type_inc description: Index for timeseries. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true TimeIntervals__timeseries: @@ -108,8 +102,6 @@ classes: array: exact_number_dimensions: 1 range: int32 - required: false - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -117,14 +109,10 @@ classes: array: exact_number_dimensions: 1 range: int32 - required: false - multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. array: exact_number_dimensions: 1 range: TimeSeries - required: false - multivalued: false inlined: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml index a3eb463..be74485 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml @@ -81,13 +81,11 @@ classes: other files. range: text required: true - multivalued: false session_description: name: session_description description: A description of the experimental session and data in the file. range: text required: true - multivalued: false session_start_time: name: session_start_time description: 'Date and time of the experiment/session start. The date is stored @@ -96,7 +94,6 @@ classes: offset. Date accuracy is up to milliseconds.' 
range: isodatetime required: true - multivalued: false timestamps_reference_time: name: timestamps_reference_time description: 'Date and time corresponding to time zero of all timestamps. @@ -106,7 +103,6 @@ classes: times stored in the file use this time as reference (i.e., time zero).' range: isodatetime required: true - multivalued: false acquisition: name: acquisition description: Data streams recorded from the system, including ephys, ophys, @@ -185,7 +181,6 @@ classes: can exist in the present file or can be linked to a remote library file. range: NWBFile__stimulus required: true - multivalued: false inlined: true inlined_as_list: true general: @@ -207,7 +202,6 @@ classes: should not be created unless there is data to store within them. range: NWBFile__general required: true - multivalued: false inlined: true inlined_as_list: true intervals: @@ -217,16 +211,12 @@ classes: an experiment, or epochs (see epochs subgroup) deriving from analysis of data. range: NWBFile__intervals - required: false - multivalued: false inlined: true inlined_as_list: true units: name: units description: Data about sorted spike units. range: Units - required: false - multivalued: false inlined: true inlined_as_list: false tree_root: true @@ -299,14 +289,10 @@ classes: name: data_collection description: Notes about data collection and analysis. range: text - required: false - multivalued: false experiment_description: name: experiment_description description: General description of the experiment. range: text - required: false - multivalued: false experimenter: name: experimenter description: Name of person(s) who performed the experiment. Can also specify @@ -321,8 +307,6 @@ classes: name: institution description: Institution(s) where experiment was performed. range: text - required: false - multivalued: false keywords: name: keywords description: Terms to search over. @@ -336,28 +320,20 @@ classes: name: lab description: Laboratory where experiment was performed. 
range: text - required: false - multivalued: false notes: name: notes description: Notes about the experiment. range: text - required: false - multivalued: false pharmacology: name: pharmacology description: Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. range: text - required: false - multivalued: false protocol: name: protocol description: Experimental protocol, if applicable. e.g., include IACUC protocol number. range: text - required: false - multivalued: false related_publications: name: related_publications description: Publication information. PMID, DOI, URL, etc. @@ -371,49 +347,36 @@ classes: name: session_id description: Lab-specific ID for the session. range: text - required: false - multivalued: false slices: name: slices description: Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. range: text - required: false - multivalued: false source_script: name: source_script description: Script file or link to public source code used to create this NWB file. range: general__source_script - required: false - multivalued: false inlined: true stimulus: name: stimulus description: Notes about stimuli, such as how and where they were presented. range: text - required: false - multivalued: false surgery: name: surgery description: Narrative description about surgery/surgeries, including date(s) and who performed surgery. range: text - required: false - multivalued: false virus: name: virus description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - required: false - multivalued: false lab_meta_data: name: lab_meta_data description: Place-holder than can be extended so that lab-specific meta-data can be placed in /general. 
range: LabMetaData - required: false multivalued: true inlined: true inlined_as_list: false @@ -431,24 +394,18 @@ classes: description: Information about the animal or person from which the data was measured. range: Subject - required: false - multivalued: false inlined: true inlined_as_list: false extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. range: general__extracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. range: general__intracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true optogenetics: @@ -503,7 +460,6 @@ classes: name: electrode_group description: Physical group of electrodes. range: ElectrodeGroup - required: false multivalued: true inlined: true inlined_as_list: false @@ -511,8 +467,6 @@ classes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes - required: false - multivalued: false inlined: true inlined_as_list: true extracellular_ephys__electrodes: @@ -656,13 +610,10 @@ classes: frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries. range: text - required: false - multivalued: false intracellular_electrode: name: intracellular_electrode description: An intracellular electrode. range: IntracellularElectrode - required: false multivalued: true inlined: true inlined_as_list: false @@ -670,8 +621,6 @@ classes: name: sweep_table description: The table which groups different PatchClampSeries together. 
range: SweepTable - required: false - multivalued: false inlined: true inlined_as_list: false NWBFile__intervals: @@ -692,24 +641,18 @@ classes: description: Divisions in time marking experimental stages or sub-divisions of a single recording session. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false trials: name: trials description: Repeated experimental events that have a logical grouping. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false invalid_times: name: invalid_times description: Time intervals that should be removed from analysis. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false time_intervals: @@ -717,7 +660,6 @@ classes: description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals - required: false multivalued: true inlined: true inlined_as_list: false @@ -746,56 +688,38 @@ classes: name: age description: Age of subject. Can be supplied instead of 'date_of_birth'. range: text - required: false - multivalued: false date_of_birth: name: date_of_birth description: Date of birth of subject. Can be supplied instead of 'age'. range: isodatetime - required: false - multivalued: false description: name: description description: Description of subject and where subject came from (e.g., breeder, if animal). range: text - required: false - multivalued: false genotype: name: genotype description: Genetic strain. If absent, assume Wild Type (WT). range: text - required: false - multivalued: false sex: name: sex description: Gender of subject. range: text - required: false - multivalued: false species: name: species description: Species of subject. range: text - required: false - multivalued: false strain: name: strain description: Strain of subject. 
range: text - required: false - multivalued: false subject_id: name: subject_id description: ID of animal/person used/participating in experiment (lab convention). range: text - required: false - multivalued: false weight: name: weight description: Weight at time of experiment, at time of surgery and at other important times. range: text - required: false - multivalued: false tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml index 26823be..e37c11d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml @@ -41,15 +41,12 @@ classes: description: Recorded voltage or current. range: PatchClampSeries__data required: true - multivalued: false inlined: true gain: name: gain description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). range: float32 - required: false - multivalued: false electrode: name: electrode annotations: @@ -57,7 +54,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: IntracellularElectrode @@ -74,6 +70,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. 
It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -99,31 +131,24 @@ classes: identifier: true range: string required: true + bias_current: + name: bias_current + description: Bias current, in amps. + range: float32 + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + range: float32 + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + range: float32 data: name: data description: Recorded voltage. range: CurrentClampSeries__data required: true - multivalued: false inlined: true - bias_current: - name: bias_current - description: Bias current, in amps. 
- range: float32 - required: false - multivalued: false - bridge_balance: - name: bridge_balance - description: Bridge balance, in ohms. - range: float32 - required: false - multivalued: false - capacitance_compensation: - name: capacitance_compensation - description: Capacitance compensation, in farads. - range: float32 - required: false - multivalued: false tree_root: true CurrentClampSeries__data: name: CurrentClampSeries__data @@ -136,6 +161,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -147,8 +208,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IZeroClampSeries: name: IZeroClampSeries description: Voltage data from an intracellular recording when all current and @@ -175,19 +238,16 @@ classes: description: Bias current, in amps, fixed to 0.0. range: float32 required: true - multivalued: false bridge_balance: name: bridge_balance description: Bridge balance, in ohms, fixed to 0.0. range: float32 required: true - multivalued: false capacitance_compensation: name: capacitance_compensation description: Capacitance compensation, in farads, fixed to 0.0. range: float32 required: true - multivalued: false tree_root: true CurrentClampStimulusSeries: name: CurrentClampStimulusSeries @@ -204,7 +264,6 @@ classes: description: Stimulus current applied. range: CurrentClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true CurrentClampStimulusSeries__data: @@ -218,6 +277,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. 
Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -229,8 +324,10 @@ classes: equals_string: amperes value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries: name: VoltageClampSeries description: Current data from an intracellular voltage-clamp recording. A corresponding @@ -243,87 +340,48 @@ classes: identifier: true range: string required: true - data: - name: data - description: Recorded current. 
- range: VoltageClampSeries__data - required: true - multivalued: false - inlined: true capacitance_fast: name: capacitance_fast description: Fast capacitance, in farads. range: VoltageClampSeries__capacitance_fast - required: false - multivalued: false inlined: true capacitance_slow: name: capacitance_slow description: Slow capacitance, in farads. range: VoltageClampSeries__capacitance_slow - required: false - multivalued: false + inlined: true + data: + name: data + description: Recorded current. + range: VoltageClampSeries__data + required: true inlined: true resistance_comp_bandwidth: name: resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. range: VoltageClampSeries__resistance_comp_bandwidth - required: false - multivalued: false inlined: true resistance_comp_correction: name: resistance_comp_correction description: Resistance compensation correction, in percent. range: VoltageClampSeries__resistance_comp_correction - required: false - multivalued: false inlined: true resistance_comp_prediction: name: resistance_comp_prediction description: Resistance compensation prediction, in percent. range: VoltageClampSeries__resistance_comp_prediction - required: false - multivalued: false inlined: true whole_cell_capacitance_comp: name: whole_cell_capacitance_comp description: Whole cell capacitance compensation, in farads. range: VoltageClampSeries__whole_cell_capacitance_comp - required: false - multivalued: false inlined: true whole_cell_series_resistance_comp: name: whole_cell_series_resistance_comp description: Whole cell series resistance compensation, in ohms. range: VoltageClampSeries__whole_cell_series_resistance_comp - required: false - multivalued: false inlined: true tree_root: true - VoltageClampSeries__data: - name: VoltageClampSeries__data - description: Recorded current. 
- attributes: - name: - name: name - ifabsent: string(data) - identifier: true - range: string - required: true - equals_string: data - unit: - name: unit - description: Base unit of measurement for working with the data. which is - fixed to 'amperes'. Actual stored values are not necessarily stored in these - units. To access the data in these units, multiply 'data' by 'conversion'. - ifabsent: string(amperes) - range: text - required: true - equals_string: amperes - value: - name: value - range: AnyType - required: true VoltageClampSeries__capacitance_fast: name: VoltageClampSeries__capacitance_fast description: Fast capacitance, in farads. @@ -368,6 +426,68 @@ classes: name: value range: float32 required: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) + range: text + required: true + equals_string: amperes + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries__resistance_comp_bandwidth: name: VoltageClampSeries__resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. @@ -498,7 +618,6 @@ classes: description: Stimulus voltage applied. range: VoltageClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true VoltageClampStimulusSeries__data: @@ -512,6 +631,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. 
Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -523,8 +678,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IntracellularElectrode: name: IntracellularElectrode description: An intracellular electrode and its metadata. @@ -540,45 +697,32 @@ classes: description: Description of electrode (e.g., whole-cell, sharp, etc.). 
range: text required: true - multivalued: false filtering: name: filtering description: Electrode specific filtering. range: text - required: false - multivalued: false initial_access_resistance: name: initial_access_resistance description: Initial access resistance. range: text - required: false - multivalued: false location: name: location description: Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. range: text - required: false - multivalued: false resistance: name: resistance description: Electrode resistance, in ohms. range: text - required: false - multivalued: false seal: name: seal description: Information about seal used for recording. range: text - required: false - multivalued: false slice: name: slice description: Information about slice used for recording. range: text - required: false - multivalued: false device: name: device annotations: @@ -586,7 +730,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -602,15 +745,6 @@ classes: identifier: true range: string required: true - sweep_number: - name: sweep_number - description: Sweep number of the PatchClampSeries in that row. - array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: uint32 - required: true - multivalued: false series: name: series description: The PatchClampSeries with the sweep number in that row. @@ -633,6 +767,14 @@ classes: description: Index for series. range: VectorIndex required: true - multivalued: false inlined: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. 
+ array: + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: uint32 + required: true + multivalued: false tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml index adfab1b..28b17e1 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.image.yaml @@ -90,21 +90,8 @@ classes: data: name: data description: Binary data representing images across frames. - range: numeric - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - alias: z + range: ImageSeries__data + inlined: true dimension: name: dimension description: Number of pixels on x, y, (and z) axes. @@ -122,8 +109,6 @@ classes: used if the image is stored in another NWB file and that file is linked to this file. range: ImageSeries__external_file - required: false - multivalued: false inlined: true format: name: format @@ -131,22 +116,88 @@ classes: contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + ifabsent: string(raw) range: text - required: false - multivalued: false device: name: device annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Device - range: string tree_root: true + ImageSeries__data: + name: ImageSeries__data + description: Binary data representing images across frames. 
+ attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z ImageSeries__external_file: name: ImageSeries__external_file description: Paths to one or more external file(s). The field is only present @@ -205,7 +256,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries @@ -225,12 +275,16 @@ classes: identifier: true range: string required: true + data: + name: data + description: Images presented to subject, either grayscale or RGB + range: OpticalSeries__data + required: true + inlined: true distance: name: distance description: Distance from camera/monitor to target/eye. range: float32 - required: false - multivalued: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -246,12 +300,69 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 - data: - name: data - description: Images presented to subject, either grayscale or RGB - range: numeric + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. + range: text + tree_root: true + OpticalSeries__data: + name: OpticalSeries__data + description: Images presented to subject, either grayscale or RGB + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string required: true - multivalued: false + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. 
Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. 
+ range: text + required: true + value: + name: value + range: numeric any_of: - array: dimensions: @@ -265,14 +376,6 @@ classes: - alias: y - alias: r_g_b exact_cardinality: 3 - orientation: - name: orientation - description: Description of image relative to some reference frame (e.g., - which way is up). Must also specify frame of reference. - range: text - required: false - multivalued: false - tree_root: true IndexSeries: name: IndexSeries description: Stores indices to image frames stored in an ImageSeries. The purpose @@ -291,12 +394,9 @@ classes: data: name: data description: Index of the frame in the referenced ImageSeries. - array: - dimensions: - - alias: num_times - range: int32 + range: IndexSeries__data required: true - multivalued: false + inlined: true indexed_timeseries: name: indexed_timeseries annotations: @@ -304,9 +404,68 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries - range: string tree_root: true + IndexSeries__data: + name: IndexSeries__data + description: Index of the frame in the referenced ImageSeries. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + array: + dimensions: + - alias: num_times + range: int32 diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml index c2323b8..e151936 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.misc.yaml @@ -38,7 +38,6 @@ classes: description: Values of each feature at each time. 
range: AbstractFeatureSeries__data required: true - multivalued: false inlined: true feature_units: name: feature_units @@ -70,6 +69,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Since there can be different units for different features, store @@ -105,13 +140,70 @@ classes: data: name: data description: Annotations made during an experiment. + range: AnnotationSeries__data + required: true + inlined: true + tree_root: true + AnnotationSeries__data: + name: AnnotationSeries__data + description: Annotations made during an experiment. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: text - required: true - multivalued: false - tree_root: true IntervalSeries: name: IntervalSeries description: Stores intervals of data. The timestamps field stores the beginning @@ -131,13 +223,70 @@ classes: data: name: data description: Use values >0 if interval started, <0 if interval ended. + range: IntervalSeries__data + required: true + inlined: true + tree_root: true + IntervalSeries__data: + name: IntervalSeries__data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. 
+ range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: int8 - required: true - multivalued: false - tree_root: true DecompositionSeries: name: DecompositionSeries description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -153,14 +302,12 @@ classes: description: Data decomposed into frequency bands. range: DecompositionSeries__data required: true - multivalued: false inlined: true metric: name: metric description: The metric used, e.g. phase, amplitude, power. range: text required: true - multivalued: false source_channels: name: source_channels annotations: @@ -173,8 +320,6 @@ classes: description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. 
range: DynamicTableRegion - required: false - multivalued: false inlined: true bands: name: bands @@ -182,7 +327,6 @@ classes: from. There should be one row in this table for each band. range: DecompositionSeries__bands required: true - multivalued: false inlined: true inlined_as_list: true source_timeseries: @@ -191,8 +335,6 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: TimeSeries @@ -209,6 +351,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -289,63 +467,13 @@ classes: identifier: true range: string required: true - spike_times_index: - name: spike_times_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the spike_times dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - spike_times: - name: spike_times - description: Spike times for each unit. - range: Units__spike_times - required: false - multivalued: false - inlined: true - obs_intervals_index: - name: obs_intervals_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the obs_intervals dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - obs_intervals: - name: obs_intervals - description: Observation intervals for each unit. + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. array: - dimensions: - - alias: num_intervals - - alias: start_end - exact_cardinality: 2 - range: float64 - required: false - multivalued: false - electrodes_index: - name: electrodes_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into electrodes. 
- range: VectorIndex + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: ElectrodeGroup required: false multivalued: false inlined: true @@ -360,51 +488,69 @@ classes: value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion - required: false - multivalued: false inlined: true - electrode_group: - name: electrode_group - description: Electrode group that each spike unit came from. + electrodes_index: + name: electrodes_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into electrodes. + range: VectorIndex + inlined: true + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: ElectrodeGroup + dimensions: + - alias: num_intervals + - alias: start_end + exact_cardinality: 2 + range: float64 required: false multivalued: false + obs_intervals_index: + name: obs_intervals_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the obs_intervals dataset. + range: VectorIndex + inlined: true + spike_times: + name: spike_times + description: Spike times for each unit. + range: Units__spike_times + inlined: true + spike_times_index: + name: spike_times_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the spike_times dataset. + range: VectorIndex inlined: true waveform_mean: name: waveform_mean description: Spike waveform mean for each spike unit. 
- range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_mean + inlined: true waveform_sd: name: waveform_sd description: Spike waveform standard deviation for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_sd + inlined: true waveforms: name: waveforms description: Individual waveforms for each spike on each electrode. This is @@ -430,13 +576,8 @@ classes: order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. - array: - dimensions: - - alias: num_waveforms - - alias: num_samples - range: numeric - required: false - multivalued: false + range: Units__waveforms + inlined: true waveforms_index: name: waveforms_index annotations: @@ -449,8 +590,6 @@ classes: description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true waveforms_index_index: name: waveforms_index_index @@ -464,8 +603,6 @@ classes: description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true Units__spike_times: @@ -489,3 +626,97 @@ classes: for the spike time to be between samples. range: float64 required: false + Units__waveform_mean: + name: Units__waveform_mean + description: Spike waveform mean for each spike unit. 
+ is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_mean) + identifier: true + range: string + required: true + equals_string: waveform_mean + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveform_sd: + name: Units__waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_sd) + identifier: true + range: string + required: true + equals_string: waveform_sd + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveforms: + name: Units__waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. 
If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveforms) + identifier: true + range: string + required: true + equals_string: waveforms + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml index 0dc7be0..c4078fa 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml @@ -27,12 +27,9 @@ classes: data: name: data description: Applied power for optogenetic stimulus, in watts. 
- array: - dimensions: - - alias: num_times - range: numeric + range: OptogeneticSeries__data required: true - multivalued: false + inlined: true site: name: site annotations: @@ -40,12 +37,71 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: OptogeneticStimulusSite - range: string tree_root: true + OptogeneticSeries__data: + name: OptogeneticSeries__data + description: Applied power for optogenetic stimulus, in watts. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. + ifabsent: string(watts) + range: text + required: true + equals_string: watts + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric OptogeneticStimulusSite: name: OptogeneticStimulusSite description: A site of optogenetic stimulation. @@ -61,13 +117,11 @@ classes: description: Description of stimulation site. range: text required: true - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false location: name: location description: Location of the stimulation site. Specify the area, layer, comments @@ -75,7 +129,6 @@ classes: standard atlas names for anatomical regions when possible. 
range: text required: true - multivalued: false device: name: device annotations: @@ -83,7 +136,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml index 40860fc..5041d82 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml @@ -60,7 +60,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -80,17 +79,9 @@ classes: data: name: data description: Signals from ROIs. - range: numeric + range: RoiResponseSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_rois + inlined: true rois: name: rois annotations: @@ -104,9 +95,73 @@ classes: on the ROIs stored in this timeseries. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true + RoiResponseSeries__data: + name: RoiResponseSeries__data + description: Signals from ROIs. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois DfOverF: name: DfOverF description: dF/F information about a region of interest (ROI). 
Storage hierarchy @@ -182,6 +237,13 @@ classes: - alias: num_x - alias: num_y - alias: num_z + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + range: PlaneSegmentation__pixel_mask + inlined: true pixel_mask_index: name: pixel_mask_index annotations: @@ -193,17 +255,13 @@ classes: value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex - required: false - multivalued: false inlined: true - pixel_mask: - name: pixel_mask - description: 'Pixel masks for each ROI: a list of indices and weights for - the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' - range: PlaneSegmentation__pixel_mask - required: false - multivalued: false + range: PlaneSegmentation__voxel_mask inlined: true voxel_mask_index: name: voxel_mask_index @@ -216,17 +274,6 @@ classes: value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex - required: false - multivalued: false - inlined: true - voxel_mask: - name: voxel_mask - description: 'Voxel masks for each ROI: a list of indices and weights for - the ROI. Voxel masks are concatenated and parsing of this dataset is maintained - by the PlaneSegmentation' - range: PlaneSegmentation__voxel_mask - required: false - multivalued: false inlined: true reference_images: name: reference_images @@ -243,7 +290,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -269,24 +315,18 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Pixel y-coordinate. 
array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the pixel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false PlaneSegmentation__voxel_mask: name: PlaneSegmentation__voxel_mask description: 'Voxel masks for each ROI: a list of indices and weights for the @@ -307,32 +347,24 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Voxel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false z: name: z description: Voxel z-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the voxel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ImagingPlane: name: ImagingPlane description: An imaging plane and its metadata. @@ -347,27 +379,21 @@ classes: name: description description: Description of the imaging plane. range: text - required: false - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false imaging_rate: name: imaging_rate description: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. range: float32 - required: false - multivalued: false indicator: name: indicator description: Calcium indicator. range: text required: true - multivalued: false location: name: location description: Location of the imaging plane. Specify the area, layer, comments @@ -375,15 +401,12 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false manifold: name: manifold description: DEPRECATED Physical position of each pixel. 
'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. range: ImagingPlane__manifold - required: false - multivalued: false inlined: true origin_coords: name: origin_coords @@ -391,8 +414,6 @@ classes: 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). range: ImagingPlane__origin_coords - required: false - multivalued: false inlined: true grid_spacing: name: grid_spacing @@ -400,8 +421,6 @@ classes: in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. range: ImagingPlane__grid_spacing - required: false - multivalued: false inlined: true reference_frame: name: reference_frame @@ -423,8 +442,6 @@ classes: axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." range: text - required: false - multivalued: false optical_channel: name: optical_channel description: An optical channel used to record from an imaging plane. @@ -440,7 +457,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -573,13 +589,11 @@ classes: description: Description or other notes about the channel. range: text required: true - multivalued: false emission_lambda: name: emission_lambda description: Emission wavelength for channel, in nm. range: float32 required: true - multivalued: false tree_root: true MotionCorrection: name: MotionCorrection @@ -610,7 +624,6 @@ classes: description: Image stack with frames shifted to the common coordinates. range: ImageSeries required: true - multivalued: false inlined: true inlined_as_list: false xy_translation: @@ -619,7 +632,6 @@ classes: coordinates, for example, to align each frame to a reference image. 
range: TimeSeries required: true - multivalued: false inlined: true inlined_as_list: false original: @@ -629,7 +641,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml index 97007ea..2708c7e 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml @@ -37,30 +37,24 @@ classes: description: Phase response to stimulus on the first measured axis. range: ImagingRetinotopy__axis_1_phase_map required: true - multivalued: false inlined: true axis_1_power_map: name: axis_1_power_map description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_1_power_map - required: false - multivalued: false inlined: true axis_2_phase_map: name: axis_2_phase_map description: Phase response to stimulus on the second measured axis. range: ImagingRetinotopy__axis_2_phase_map required: true - multivalued: false inlined: true axis_2_power_map: name: axis_2_power_map description: Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_2_power_map - required: false - multivalued: false inlined: true axis_descriptions: name: axis_descriptions @@ -79,16 +73,12 @@ classes: description: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].' range: ImagingRetinotopy__focal_depth_image - required: false - multivalued: false inlined: true sign_map: name: sign_map description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. 
range: ImagingRetinotopy__sign_map - required: false - multivalued: false inlined: true vasculature_image: name: vasculature_image @@ -96,7 +86,6 @@ classes: [rows][columns]' range: ImagingRetinotopy__vasculature_image required: true - multivalued: false inlined: true tree_root: true ImagingRetinotopy__axis_1_phase_map: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml index 1bfb911..68f456b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml @@ -47,7 +47,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -56,7 +55,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to @@ -64,7 +62,6 @@ classes: exact_number_dimensions: 1 range: TimeSeries required: true - multivalued: false inlined: true tree_root: true Image: @@ -166,7 +163,6 @@ classes: external file. range: TimeSeries__data required: true - multivalued: false inlined: true starting_time: name: starting_time @@ -174,8 +170,6 @@ classes: uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. range: TimeSeries__starting_time - required: false - multivalued: false inlined: true timestamps: name: timestamps @@ -218,8 +212,6 @@ classes: external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. 
range: TimeSeries__sync - required: false - multivalued: false inlined: true inlined_as_list: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml index 47aa752..477072b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml @@ -38,14 +38,11 @@ classes: reference frame. range: SpatialSeries__data required: true - multivalued: false inlined: true reference_frame: name: reference_frame description: Description defining what exactly 'straight-ahead' means. range: text - required: false - multivalued: false tree_root: true SpatialSeries__data: name: SpatialSeries__data @@ -59,6 +56,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml index 4d8e539..47187f0 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml @@ -39,40 +39,6 @@ classes: about the filter properties as possible. range: text required: false - data: - name: data - description: Recorded voltage data. - range: numeric - required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_channels - - array: - dimensions: - - alias: num_times - - alias: num_channels - - alias: num_samples - electrodes: - name: electrodes - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: DynamicTableRegion pointer to the electrodes that this time series - was generated from. 
- range: DynamicTableRegion - required: true - multivalued: false - inlined: true channel_conversion: name: channel_conversion description: Channel-specific conversion factor. Multiply the data in the @@ -90,7 +56,100 @@ classes: range: float32 required: false multivalued: false + data: + name: data + description: Recorded voltage data. + range: ElectricalSeries__data + required: true + inlined: true + electrodes: + name: electrodes + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + range: DynamicTableRegion + required: true + inlined: true tree_root: true + ElectricalSeries__data: + name: ElectricalSeries__data + description: Recorded voltage data. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. To access the data in these units, multiply 'data' by 'conversion' + and 'channel_conversion' (if present). + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples SpikeEventSeries: name: SpikeEventSeries description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold @@ -111,19 +170,9 @@ classes: data: name: data description: Spike waveforms. 
- range: numeric + range: SpikeEventSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_events - - alias: num_samples - - array: - dimensions: - - alias: num_events - - alias: num_channels - - alias: num_samples + inlined: true timestamps: name: timestamps description: Timestamps for samples stored in data, in seconds, relative to @@ -137,6 +186,73 @@ classes: required: true multivalued: false tree_root: true + SpikeEventSeries__data: + name: SpikeEventSeries__data + description: Spike waveforms. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples FeatureExtraction: name: FeatureExtraction description: Features, such as PC1 and PC2, that are extracted from signals stored @@ -192,7 +308,6 @@ classes: was generated from. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true EventDetection: @@ -212,7 +327,6 @@ classes: or dV/dT threshold, as well as relevant values. 
range: text required: true - multivalued: false source_idx: name: source_idx description: Indices (zero-based) into source ElectricalSeries::data array @@ -241,7 +355,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ElectricalSeries @@ -323,8 +436,6 @@ classes: name: position description: stereotaxic or common framework coordinates range: ElectrodeGroup__position - required: false - multivalued: false inlined: true device: name: device @@ -333,7 +444,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -356,24 +466,18 @@ classes: array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false y: name: y description: y coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false z: name: z description: z coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ClusterWaveforms: name: ClusterWaveforms description: DEPRECATED The mean waveform shape, including standard deviation, @@ -395,7 +499,6 @@ classes: description: Filtering applied to data before generating mean/sd range: text required: true - multivalued: false waveform_mean: name: waveform_mean description: The mean waveform for each cluster, using the same indices for @@ -427,7 +530,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Clustering @@ -451,7 +553,6 @@ classes: clusters curated using Klusters, etc) range: text required: true - multivalued: false num: name: num description: Cluster number of each event diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml index e264a54..eedea6f 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml @@ -63,15 +63,11 @@ classes: value: neurodata_type_inc description: Index for tags. range: VectorIndex - required: false - multivalued: false inlined: true timeseries: name: timeseries description: An index into a TimeSeries object. range: TimeIntervals__timeseries - required: false - multivalued: false inlined: true timeseries_index: name: timeseries_index @@ -84,8 +80,6 @@ classes: value: neurodata_type_inc description: Index for timeseries. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true TimeIntervals__timeseries: @@ -108,8 +102,6 @@ classes: array: exact_number_dimensions: 1 range: int32 - required: false - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -117,14 +109,10 @@ classes: array: exact_number_dimensions: 1 range: int32 - required: false - multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. array: exact_number_dimensions: 1 range: TimeSeries - required: false - multivalued: false inlined: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml index f81b157..f11e44b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml @@ -81,13 +81,11 @@ classes: other files. range: text required: true - multivalued: false session_description: name: session_description description: A description of the experimental session and data in the file. range: text required: true - multivalued: false session_start_time: name: session_start_time description: 'Date and time of the experiment/session start. The date is stored @@ -96,7 +94,6 @@ classes: offset. Date accuracy is up to milliseconds.' 
range: isodatetime required: true - multivalued: false timestamps_reference_time: name: timestamps_reference_time description: 'Date and time corresponding to time zero of all timestamps. @@ -106,7 +103,6 @@ classes: times stored in the file use this time as reference (i.e., time zero).' range: isodatetime required: true - multivalued: false acquisition: name: acquisition description: Data streams recorded from the system, including ephys, ophys, @@ -185,7 +181,6 @@ classes: can exist in the present file or can be linked to a remote library file. range: NWBFile__stimulus required: true - multivalued: false inlined: true inlined_as_list: true general: @@ -207,7 +202,6 @@ classes: should not be created unless there is data to store within them. range: NWBFile__general required: true - multivalued: false inlined: true inlined_as_list: true intervals: @@ -217,16 +211,12 @@ classes: an experiment, or epochs (see epochs subgroup) deriving from analysis of data. range: NWBFile__intervals - required: false - multivalued: false inlined: true inlined_as_list: true units: name: units description: Data about sorted spike units. range: Units - required: false - multivalued: false inlined: true inlined_as_list: false tree_root: true @@ -299,14 +289,10 @@ classes: name: data_collection description: Notes about data collection and analysis. range: text - required: false - multivalued: false experiment_description: name: experiment_description description: General description of the experiment. range: text - required: false - multivalued: false experimenter: name: experimenter description: Name of person(s) who performed the experiment. Can also specify @@ -321,8 +307,6 @@ classes: name: institution description: Institution(s) where experiment was performed. range: text - required: false - multivalued: false keywords: name: keywords description: Terms to search over. @@ -336,28 +320,20 @@ classes: name: lab description: Laboratory where experiment was performed. 
range: text - required: false - multivalued: false notes: name: notes description: Notes about the experiment. range: text - required: false - multivalued: false pharmacology: name: pharmacology description: Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. range: text - required: false - multivalued: false protocol: name: protocol description: Experimental protocol, if applicable. e.g., include IACUC protocol number. range: text - required: false - multivalued: false related_publications: name: related_publications description: Publication information. PMID, DOI, URL, etc. @@ -371,49 +347,36 @@ classes: name: session_id description: Lab-specific ID for the session. range: text - required: false - multivalued: false slices: name: slices description: Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. range: text - required: false - multivalued: false source_script: name: source_script description: Script file or link to public source code used to create this NWB file. range: general__source_script - required: false - multivalued: false inlined: true stimulus: name: stimulus description: Notes about stimuli, such as how and where they were presented. range: text - required: false - multivalued: false surgery: name: surgery description: Narrative description about surgery/surgeries, including date(s) and who performed surgery. range: text - required: false - multivalued: false virus: name: virus description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - required: false - multivalued: false lab_meta_data: name: lab_meta_data description: Place-holder than can be extended so that lab-specific meta-data can be placed in /general. 
range: LabMetaData - required: false multivalued: true inlined: true inlined_as_list: false @@ -431,24 +394,18 @@ classes: description: Information about the animal or person from which the data was measured. range: Subject - required: false - multivalued: false inlined: true inlined_as_list: false extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. range: general__extracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. range: general__intracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true optogenetics: @@ -503,7 +460,6 @@ classes: name: electrode_group description: Physical group of electrodes. range: ElectrodeGroup - required: false multivalued: true inlined: true inlined_as_list: false @@ -511,8 +467,6 @@ classes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes - required: false - multivalued: false inlined: true inlined_as_list: true extracellular_ephys__electrodes: @@ -657,13 +611,10 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - required: false - multivalued: false intracellular_electrode: name: intracellular_electrode description: An intracellular electrode. range: IntracellularElectrode - required: false multivalued: true inlined: true inlined_as_list: false @@ -674,8 +625,6 @@ classes: tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.' 
range: SweepTable - required: false - multivalued: false inlined: true inlined_as_list: false intracellular_recordings: @@ -693,8 +642,6 @@ classes: to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. range: IntracellularRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false simultaneous_recordings: @@ -703,8 +650,6 @@ classes: the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes range: SimultaneousRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false sequential_recordings: @@ -714,8 +659,6 @@ classes: together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence. range: SequentialRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false repetitions: @@ -725,8 +668,6 @@ classes: type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. range: RepetitionsTable - required: false - multivalued: false inlined: true inlined_as_list: false experimental_conditions: @@ -734,8 +675,6 @@ classes: description: A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. range: ExperimentalConditionsTable - required: false - multivalued: false inlined: true inlined_as_list: false NWBFile__intervals: @@ -756,24 +695,18 @@ classes: description: Divisions in time marking experimental stages or sub-divisions of a single recording session. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false trials: name: trials description: Repeated experimental events that have a logical grouping. 
range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false invalid_times: name: invalid_times description: Time intervals that should be removed from analysis. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false time_intervals: @@ -781,7 +714,6 @@ classes: description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals - required: false multivalued: true inlined: true inlined_as_list: false @@ -810,56 +742,38 @@ classes: name: age description: Age of subject. Can be supplied instead of 'date_of_birth'. range: text - required: false - multivalued: false date_of_birth: name: date_of_birth description: Date of birth of subject. Can be supplied instead of 'age'. range: isodatetime - required: false - multivalued: false description: name: description description: Description of subject and where subject came from (e.g., breeder, if animal). range: text - required: false - multivalued: false genotype: name: genotype description: Genetic strain. If absent, assume Wild Type (WT). range: text - required: false - multivalued: false sex: name: sex description: Gender of subject. range: text - required: false - multivalued: false species: name: species description: Species of subject. range: text - required: false - multivalued: false strain: name: strain description: Strain of subject. range: text - required: false - multivalued: false subject_id: name: subject_id description: ID of animal/person used/participating in experiment (lab convention). range: text - required: false - multivalued: false weight: name: weight description: Weight at time of experiment, at time of surgery and at other important times. 
range: text - required: false - multivalued: false tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml index d3a808f..848fb69 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml @@ -41,15 +41,12 @@ classes: description: Recorded voltage or current. range: PatchClampSeries__data required: true - multivalued: false inlined: true gain: name: gain description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). range: float32 - required: false - multivalued: false electrode: name: electrode annotations: @@ -57,7 +54,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: IntracellularElectrode @@ -74,6 +70,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -99,31 +131,24 @@ classes: identifier: true range: string required: true + bias_current: + name: bias_current + description: Bias current, in amps. + range: float32 + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + range: float32 + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + range: float32 data: name: data description: Recorded voltage. range: CurrentClampSeries__data required: true - multivalued: false inlined: true - bias_current: - name: bias_current - description: Bias current, in amps. - range: float32 - required: false - multivalued: false - bridge_balance: - name: bridge_balance - description: Bridge balance, in ohms. - range: float32 - required: false - multivalued: false - capacitance_compensation: - name: capacitance_compensation - description: Capacitance compensation, in farads. 
- range: float32 - required: false - multivalued: false tree_root: true CurrentClampSeries__data: name: CurrentClampSeries__data @@ -136,6 +161,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -147,8 +208,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IZeroClampSeries: name: IZeroClampSeries description: Voltage data from an intracellular recording when all current and @@ -175,19 +238,16 @@ classes: description: Bias current, in amps, fixed to 0.0. range: float32 required: true - multivalued: false bridge_balance: name: bridge_balance description: Bridge balance, in ohms, fixed to 0.0. range: float32 required: true - multivalued: false capacitance_compensation: name: capacitance_compensation description: Capacitance compensation, in farads, fixed to 0.0. range: float32 required: true - multivalued: false tree_root: true CurrentClampStimulusSeries: name: CurrentClampStimulusSeries @@ -204,7 +264,6 @@ classes: description: Stimulus current applied. range: CurrentClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true CurrentClampStimulusSeries__data: @@ -218,6 +277,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. 
+ range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -229,8 +324,10 @@ classes: equals_string: amperes value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries: name: VoltageClampSeries description: Current data from an intracellular voltage-clamp recording. A corresponding @@ -243,87 +340,48 @@ classes: identifier: true range: string required: true - data: - name: data - description: Recorded current. - range: VoltageClampSeries__data - required: true - multivalued: false - inlined: true capacitance_fast: name: capacitance_fast description: Fast capacitance, in farads. range: VoltageClampSeries__capacitance_fast - required: false - multivalued: false inlined: true capacitance_slow: name: capacitance_slow description: Slow capacitance, in farads. 
range: VoltageClampSeries__capacitance_slow - required: false - multivalued: false + inlined: true + data: + name: data + description: Recorded current. + range: VoltageClampSeries__data + required: true inlined: true resistance_comp_bandwidth: name: resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. range: VoltageClampSeries__resistance_comp_bandwidth - required: false - multivalued: false inlined: true resistance_comp_correction: name: resistance_comp_correction description: Resistance compensation correction, in percent. range: VoltageClampSeries__resistance_comp_correction - required: false - multivalued: false inlined: true resistance_comp_prediction: name: resistance_comp_prediction description: Resistance compensation prediction, in percent. range: VoltageClampSeries__resistance_comp_prediction - required: false - multivalued: false inlined: true whole_cell_capacitance_comp: name: whole_cell_capacitance_comp description: Whole cell capacitance compensation, in farads. range: VoltageClampSeries__whole_cell_capacitance_comp - required: false - multivalued: false inlined: true whole_cell_series_resistance_comp: name: whole_cell_series_resistance_comp description: Whole cell series resistance compensation, in ohms. range: VoltageClampSeries__whole_cell_series_resistance_comp - required: false - multivalued: false inlined: true tree_root: true - VoltageClampSeries__data: - name: VoltageClampSeries__data - description: Recorded current. - attributes: - name: - name: name - ifabsent: string(data) - identifier: true - range: string - required: true - equals_string: data - unit: - name: unit - description: Base unit of measurement for working with the data. which is - fixed to 'amperes'. Actual stored values are not necessarily stored in these - units. To access the data in these units, multiply 'data' by 'conversion'. 
- ifabsent: string(amperes) - range: text - required: true - equals_string: amperes - value: - name: value - range: AnyType - required: true VoltageClampSeries__capacitance_fast: name: VoltageClampSeries__capacitance_fast description: Fast capacitance, in farads. @@ -368,6 +426,68 @@ classes: name: value range: float32 required: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) + range: text + required: true + equals_string: amperes + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries__resistance_comp_bandwidth: name: VoltageClampSeries__resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. @@ -498,7 +618,6 @@ classes: description: Stimulus voltage applied. range: VoltageClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true VoltageClampStimulusSeries__data: @@ -512,6 +631,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. 
+ range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -523,8 +678,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IntracellularElectrode: name: IntracellularElectrode description: An intracellular electrode and its metadata. @@ -540,45 +697,32 @@ classes: description: Description of electrode (e.g., whole-cell, sharp, etc.). range: text required: true - multivalued: false filtering: name: filtering description: Electrode specific filtering. range: text - required: false - multivalued: false initial_access_resistance: name: initial_access_resistance description: Initial access resistance. range: text - required: false - multivalued: false location: name: location description: Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. range: text - required: false - multivalued: false resistance: name: resistance description: Electrode resistance, in ohms. range: text - required: false - multivalued: false seal: name: seal description: Information about seal used for recording. range: text - required: false - multivalued: false slice: name: slice description: Information about slice used for recording. range: text - required: false - multivalued: false device: name: device annotations: @@ -586,7 +730,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -605,15 +748,6 @@ classes: identifier: true range: string required: true - sweep_number: - name: sweep_number - description: Sweep number of the PatchClampSeries in that row. - array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: uint32 - required: true - multivalued: false series: name: series description: The PatchClampSeries with the sweep number in that row. @@ -636,8 +770,16 @@ classes: description: Index for series. range: VectorIndex required: true - multivalued: false inlined: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. + array: + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: uint32 + required: true + multivalued: false tree_root: true IntracellularElectrodesTable: name: IntracellularElectrodesTable @@ -697,7 +839,6 @@ classes: recording (rows). 
range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularResponsesTable: @@ -730,7 +871,6 @@ classes: recording (rows) range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularRecordingsTable: @@ -772,15 +912,6 @@ classes: description: Table for storing intracellular electrode related metadata. range: IntracellularElectrodesTable required: true - multivalued: false - inlined: true - inlined_as_list: false - stimuli: - name: stimuli - description: Table for storing intracellular stimulus related metadata. - range: IntracellularStimuliTable - required: true - multivalued: false inlined: true inlined_as_list: false responses: @@ -788,7 +919,13 @@ classes: description: Table for storing intracellular response related metadata. range: IntracellularResponsesTable required: true - multivalued: false + inlined: true + inlined_as_list: false + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + range: IntracellularStimuliTable + required: true inlined: true inlined_as_list: false tree_root: true @@ -812,7 +949,6 @@ classes: table. range: SimultaneousRecordingsTable__recordings required: true - multivalued: false inlined: true recordings_index: name: recordings_index @@ -826,7 +962,6 @@ classes: description: Index dataset for the recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true SimultaneousRecordingsTable__recordings: @@ -871,7 +1006,6 @@ classes: table. range: SequentialRecordingsTable__simultaneous_recordings required: true - multivalued: false inlined: true simultaneous_recordings_index: name: simultaneous_recordings_index @@ -885,7 +1019,6 @@ classes: description: Index dataset for the simultaneous_recordings column. 
range: VectorIndex required: true - multivalued: false inlined: true stimulus_type: name: stimulus_type @@ -939,7 +1072,6 @@ classes: table. range: RepetitionsTable__sequential_recordings required: true - multivalued: false inlined: true sequential_recordings_index: name: sequential_recordings_index @@ -953,7 +1085,6 @@ classes: description: Index dataset for the sequential_recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true RepetitionsTable__sequential_recordings: @@ -995,7 +1126,6 @@ classes: description: A reference to one or more rows in the RepetitionsTable table. range: ExperimentalConditionsTable__repetitions required: true - multivalued: false inlined: true repetitions_index: name: repetitions_index @@ -1009,7 +1139,6 @@ classes: description: Index dataset for the repetitions column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true ExperimentalConditionsTable__repetitions: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml index fec75ec..716c087 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.image.yaml @@ -91,21 +91,9 @@ classes: name: data description: Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. - range: numeric + range: ImageSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - alias: z + inlined: true dimension: name: dimension description: Number of pixels on x, y, (and z) axes. @@ -123,8 +111,6 @@ classes: used if the image is stored in another NWB file and that file is linked to this file. 
range: ImageSeries__external_file - required: false - multivalued: false inlined: true format: name: format @@ -132,22 +118,89 @@ classes: contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + ifabsent: string(raw) range: text - required: false - multivalued: false device: name: device annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Device - range: string tree_root: true + ImageSeries__data: + name: ImageSeries__data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z ImageSeries__external_file: name: ImageSeries__external_file description: Paths to one or more external file(s). The field is only present @@ -206,7 +259,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries @@ -226,12 +278,16 @@ classes: identifier: true range: string required: true + data: + name: data + description: Images presented to subject, either grayscale or RGB + range: OpticalSeries__data + required: true + inlined: true distance: name: distance description: Distance from camera/monitor to target/eye. range: float32 - required: false - multivalued: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. 
@@ -247,12 +303,69 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 - data: - name: data - description: Images presented to subject, either grayscale or RGB - range: numeric + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. + range: text + tree_root: true + OpticalSeries__data: + name: OpticalSeries__data + description: Images presented to subject, either grayscale or RGB + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string required: true - multivalued: false + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + value: + name: value + range: numeric any_of: - array: dimensions: @@ -266,14 +379,6 @@ classes: - alias: y - alias: r_g_b exact_cardinality: 3 - orientation: - name: orientation - description: Description of image relative to some reference frame (e.g., - which way is up). Must also specify frame of reference. - range: text - required: false - multivalued: false - tree_root: true IndexSeries: name: IndexSeries description: Stores indices to image frames stored in an ImageSeries. The purpose @@ -292,12 +397,9 @@ classes: data: name: data description: Index of the frame in the referenced ImageSeries. 
- array: - dimensions: - - alias: num_times - range: int32 + range: IndexSeries__data required: true - multivalued: false + inlined: true indexed_timeseries: name: indexed_timeseries annotations: @@ -305,9 +407,68 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries - range: string tree_root: true + IndexSeries__data: + name: IndexSeries__data + description: Index of the frame in the referenced ImageSeries. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. 
+ range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + array: + dimensions: + - alias: num_times + range: int32 diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml index ec02fc4..5299631 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.misc.yaml @@ -38,7 +38,6 @@ classes: description: Values of each feature at each time. range: AbstractFeatureSeries__data required: true - multivalued: false inlined: true feature_units: name: feature_units @@ -70,6 +69,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Since there can be different units for different features, store @@ -105,13 +140,70 @@ classes: data: name: data description: Annotations made during an experiment. + range: AnnotationSeries__data + required: true + inlined: true + tree_root: true + AnnotationSeries__data: + name: AnnotationSeries__data + description: Annotations made during an experiment. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". 
For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: text - required: true - multivalued: false - tree_root: true IntervalSeries: name: IntervalSeries description: Stores intervals of data. 
The timestamps field stores the beginning @@ -131,13 +223,70 @@ classes: data: name: data description: Use values >0 if interval started, <0 if interval ended. + range: IntervalSeries__data + required: true + inlined: true + tree_root: true + IntervalSeries__data: + name: IntervalSeries__data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: int8 - required: true - multivalued: false - tree_root: true DecompositionSeries: name: DecompositionSeries description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -153,14 +302,12 @@ classes: description: Data decomposed into frequency bands. range: DecompositionSeries__data required: true - multivalued: false inlined: true metric: name: metric description: The metric used, e.g. phase, amplitude, power. range: text required: true - multivalued: false source_channels: name: source_channels annotations: @@ -173,8 +320,6 @@ classes: description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion - required: false - multivalued: false inlined: true bands: name: bands @@ -182,7 +327,6 @@ classes: from. There should be one row in this table for each band. range: DecompositionSeries__bands required: true - multivalued: false inlined: true inlined_as_list: true source_timeseries: @@ -191,8 +335,6 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: TimeSeries @@ -209,6 +351,42 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". 
For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. 
Actual stored @@ -289,63 +467,13 @@ classes: identifier: true range: string required: true - spike_times_index: - name: spike_times_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the spike_times dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - spike_times: - name: spike_times - description: Spike times for each unit. - range: Units__spike_times - required: false - multivalued: false - inlined: true - obs_intervals_index: - name: obs_intervals_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the obs_intervals dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - obs_intervals: - name: obs_intervals - description: Observation intervals for each unit. + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. array: - dimensions: - - alias: num_intervals - - alias: start_end - exact_cardinality: 2 - range: float64 - required: false - multivalued: false - electrodes_index: - name: electrodes_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into electrodes. - range: VectorIndex + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: ElectrodeGroup required: false multivalued: false inlined: true @@ -360,51 +488,69 @@ classes: value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion - required: false - multivalued: false inlined: true - electrode_group: - name: electrode_group - description: Electrode group that each spike unit came from. 
+ electrodes_index: + name: electrodes_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into electrodes. + range: VectorIndex + inlined: true + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: ElectrodeGroup + dimensions: + - alias: num_intervals + - alias: start_end + exact_cardinality: 2 + range: float64 required: false multivalued: false + obs_intervals_index: + name: obs_intervals_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the obs_intervals dataset. + range: VectorIndex + inlined: true + spike_times: + name: spike_times + description: Spike times for each unit. + range: Units__spike_times + inlined: true + spike_times_index: + name: spike_times_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the spike_times dataset. + range: VectorIndex inlined: true waveform_mean: name: waveform_mean description: Spike waveform mean for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_mean + inlined: true waveform_sd: name: waveform_sd description: Spike waveform standard deviation for each spike unit. 
- range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_sd + inlined: true waveforms: name: waveforms description: Individual waveforms for each spike on each electrode. This is @@ -430,13 +576,8 @@ classes: order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. - array: - dimensions: - - alias: num_waveforms - - alias: num_samples - range: numeric - required: false - multivalued: false + range: Units__waveforms + inlined: true waveforms_index: name: waveforms_index annotations: @@ -449,8 +590,6 @@ classes: description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true waveforms_index_index: name: waveforms_index_index @@ -464,8 +603,6 @@ classes: description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true Units__spike_times: @@ -489,3 +626,97 @@ classes: for the spike time to be between samples. range: float64 required: false + Units__waveform_mean: + name: Units__waveform_mean + description: Spike waveform mean for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_mean) + identifier: true + range: string + required: true + equals_string: waveform_mean + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveform_sd: + name: Units__waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_sd) + identifier: true + range: string + required: true + equals_string: waveform_sd + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveforms: + name: Units__waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. 
When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveforms) + identifier: true + range: string + required: true + equals_string: waveforms + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml index cbe1a6d..67986b2 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml @@ -27,12 +27,9 @@ classes: data: name: data description: Applied power for optogenetic stimulus, in watts. 
- array: - dimensions: - - alias: num_times - range: numeric + range: OptogeneticSeries__data required: true - multivalued: false + inlined: true site: name: site annotations: @@ -40,12 +37,71 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: OptogeneticStimulusSite - range: string tree_root: true + OptogeneticSeries__data: + name: OptogeneticSeries__data + description: Applied power for optogenetic stimulus, in watts. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. + ifabsent: string(watts) + range: text + required: true + equals_string: watts + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric OptogeneticStimulusSite: name: OptogeneticStimulusSite description: A site of optogenetic stimulation. @@ -61,13 +117,11 @@ classes: description: Description of stimulation site. range: text required: true - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false location: name: location description: Location of the stimulation site. Specify the area, layer, comments @@ -75,7 +129,6 @@ classes: standard atlas names for anatomical regions when possible. 
range: text required: true - multivalued: false device: name: device annotations: @@ -83,7 +136,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml index aec8547..c87f8be 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml @@ -60,7 +60,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -80,17 +79,9 @@ classes: data: name: data description: Signals from ROIs. - range: numeric + range: RoiResponseSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_rois + inlined: true rois: name: rois annotations: @@ -104,9 +95,73 @@ classes: on the ROIs stored in this timeseries. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true + RoiResponseSeries__data: + name: RoiResponseSeries__data + description: Signals from ROIs. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois DfOverF: name: DfOverF description: dF/F information about a region of interest (ROI). 
Storage hierarchy @@ -182,6 +237,13 @@ classes: - alias: num_x - alias: num_y - alias: num_z + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + range: PlaneSegmentation__pixel_mask + inlined: true pixel_mask_index: name: pixel_mask_index annotations: @@ -193,17 +255,13 @@ classes: value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex - required: false - multivalued: false inlined: true - pixel_mask: - name: pixel_mask - description: 'Pixel masks for each ROI: a list of indices and weights for - the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' - range: PlaneSegmentation__pixel_mask - required: false - multivalued: false + range: PlaneSegmentation__voxel_mask inlined: true voxel_mask_index: name: voxel_mask_index @@ -216,17 +274,6 @@ classes: value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex - required: false - multivalued: false - inlined: true - voxel_mask: - name: voxel_mask - description: 'Voxel masks for each ROI: a list of indices and weights for - the ROI. Voxel masks are concatenated and parsing of this dataset is maintained - by the PlaneSegmentation' - range: PlaneSegmentation__voxel_mask - required: false - multivalued: false inlined: true reference_images: name: reference_images @@ -243,7 +290,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -269,24 +315,18 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Pixel y-coordinate. 
array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the pixel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false PlaneSegmentation__voxel_mask: name: PlaneSegmentation__voxel_mask description: 'Voxel masks for each ROI: a list of indices and weights for the @@ -307,32 +347,24 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Voxel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false z: name: z description: Voxel z-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the voxel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ImagingPlane: name: ImagingPlane description: An imaging plane and its metadata. @@ -347,27 +379,21 @@ classes: name: description description: Description of the imaging plane. range: text - required: false - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false imaging_rate: name: imaging_rate description: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. range: float32 - required: false - multivalued: false indicator: name: indicator description: Calcium indicator. range: text required: true - multivalued: false location: name: location description: Location of the imaging plane. Specify the area, layer, comments @@ -375,15 +401,12 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false manifold: name: manifold description: DEPRECATED Physical position of each pixel. 
'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. range: ImagingPlane__manifold - required: false - multivalued: false inlined: true origin_coords: name: origin_coords @@ -391,8 +414,6 @@ classes: 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). range: ImagingPlane__origin_coords - required: false - multivalued: false inlined: true grid_spacing: name: grid_spacing @@ -400,8 +421,6 @@ classes: in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. range: ImagingPlane__grid_spacing - required: false - multivalued: false inlined: true reference_frame: name: reference_frame @@ -423,8 +442,6 @@ classes: axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." range: text - required: false - multivalued: false optical_channel: name: optical_channel description: An optical channel used to record from an imaging plane. @@ -440,7 +457,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -573,13 +589,11 @@ classes: description: Description or other notes about the channel. range: text required: true - multivalued: false emission_lambda: name: emission_lambda description: Emission wavelength for channel, in nm. range: float32 required: true - multivalued: false tree_root: true MotionCorrection: name: MotionCorrection @@ -610,7 +624,6 @@ classes: description: Image stack with frames shifted to the common coordinates. range: ImageSeries required: true - multivalued: false inlined: true inlined_as_list: false xy_translation: @@ -619,7 +632,6 @@ classes: coordinates, for example, to align each frame to a reference image. 
range: TimeSeries required: true - multivalued: false inlined: true inlined_as_list: false original: @@ -629,7 +641,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml index f30f06f..1b75917 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml @@ -37,30 +37,24 @@ classes: description: Phase response to stimulus on the first measured axis. range: ImagingRetinotopy__axis_1_phase_map required: true - multivalued: false inlined: true axis_1_power_map: name: axis_1_power_map description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_1_power_map - required: false - multivalued: false inlined: true axis_2_phase_map: name: axis_2_phase_map description: Phase response to stimulus on the second measured axis. range: ImagingRetinotopy__axis_2_phase_map required: true - multivalued: false inlined: true axis_2_power_map: name: axis_2_power_map description: Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_2_power_map - required: false - multivalued: false inlined: true axis_descriptions: name: axis_descriptions @@ -79,16 +73,12 @@ classes: description: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].' range: ImagingRetinotopy__focal_depth_image - required: false - multivalued: false inlined: true sign_map: name: sign_map description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. 
range: ImagingRetinotopy__sign_map - required: false - multivalued: false inlined: true vasculature_image: name: vasculature_image @@ -96,7 +86,6 @@ classes: [rows][columns]' range: ImagingRetinotopy__vasculature_image required: true - multivalued: false inlined: true tree_root: true ImagingRetinotopy__axis_1_phase_map: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml index 547dd4c..5809bb5 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml @@ -47,7 +47,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -56,7 +55,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to @@ -64,7 +62,6 @@ classes: exact_number_dimensions: 1 range: TimeSeries required: true - multivalued: false inlined: true tree_root: true Image: @@ -189,7 +186,6 @@ classes: external file. range: TimeSeries__data required: true - multivalued: false inlined: true starting_time: name: starting_time @@ -197,8 +193,6 @@ classes: uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. range: TimeSeries__starting_time - required: false - multivalued: false inlined: true timestamps: name: timestamps @@ -241,8 +235,6 @@ classes: external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. 
range: TimeSeries__sync - required: false - multivalued: false inlined: true inlined_as_list: true tree_root: true @@ -429,7 +421,5 @@ classes: and only once, so the dataset should have the same length as the number of images. range: ImageReferences - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml index 94ff5f8..9db47e3 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml @@ -38,14 +38,11 @@ classes: reference frame. range: SpatialSeries__data required: true - multivalued: false inlined: true reference_frame: name: reference_frame description: Description defining what exactly 'straight-ahead' means. range: text - required: false - multivalued: false tree_root: true SpatialSeries__data: name: SpatialSeries__data @@ -59,6 +56,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml index b611d74..6700cc0 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml @@ -39,40 +39,6 @@ classes: about the filter properties as possible. range: text required: false - data: - name: data - description: Recorded voltage data. 
- range: numeric - required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_channels - - array: - dimensions: - - alias: num_times - - alias: num_channels - - alias: num_samples - electrodes: - name: electrodes - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: DynamicTableRegion pointer to the electrodes that this time series - was generated from. - range: DynamicTableRegion - required: true - multivalued: false - inlined: true channel_conversion: name: channel_conversion description: Channel-specific conversion factor. Multiply the data in the @@ -90,7 +56,109 @@ classes: range: float32 required: false multivalued: false + data: + name: data + description: Recorded voltage data. + range: ElectricalSeries__data + required: true + inlined: true + electrodes: + name: electrodes + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + range: DynamicTableRegion + required: true + inlined: true tree_root: true + ElectricalSeries__data: + name: ElectricalSeries__data + description: Recorded voltage data. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. 
To access the data in these units, multiply 'data' by 'conversion', + followed by 'channel_conversion' (if present), and then add 'offset'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples SpikeEventSeries: name: SpikeEventSeries description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold @@ -111,19 +179,9 @@ classes: data: name: data description: Spike waveforms. - range: numeric + range: SpikeEventSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_events - - alias: num_samples - - array: - dimensions: - - alias: num_events - - alias: num_channels - - alias: num_samples + inlined: true timestamps: name: timestamps description: Timestamps for samples stored in data, in seconds, relative to @@ -137,6 +195,82 @@ classes: required: true multivalued: false tree_root: true + SpikeEventSeries__data: + name: SpikeEventSeries__data + description: Spike waveforms. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. 
It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples FeatureExtraction: name: FeatureExtraction description: Features, such as PC1 and PC2, that are extracted from signals stored @@ -192,7 +326,6 @@ classes: was generated from. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true EventDetection: @@ -212,7 +345,6 @@ classes: or dV/dT threshold, as well as relevant values. range: text required: true - multivalued: false source_idx: name: source_idx description: Indices (zero-based) into source ElectricalSeries::data array @@ -241,7 +373,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ElectricalSeries @@ -323,8 +454,6 @@ classes: name: position description: stereotaxic or common framework coordinates range: ElectrodeGroup__position - required: false - multivalued: false inlined: true device: name: device @@ -333,7 +462,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -356,24 +484,18 @@ classes: array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false y: name: y description: y coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false z: name: z description: z coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ClusterWaveforms: name: ClusterWaveforms description: DEPRECATED The mean waveform shape, including standard deviation, @@ -395,7 +517,6 @@ classes: description: Filtering applied to data before generating mean/sd range: text required: true - multivalued: false waveform_mean: name: waveform_mean description: The mean waveform for each 
cluster, using the same indices for @@ -427,7 +548,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Clustering @@ -451,7 +571,6 @@ classes: clusters curated using Klusters, etc) range: text required: true - multivalued: false num: name: num description: Cluster number of each event diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml index 9857394..81a3ca5 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml @@ -63,8 +63,6 @@ classes: value: neurodata_type_inc description: Index for tags. range: VectorIndex - required: false - multivalued: false inlined: true timeseries: name: timeseries @@ -77,8 +75,6 @@ classes: value: neurodata_type_inc description: An index into a TimeSeries object. range: TimeSeriesReferenceVectorData - required: false - multivalued: false inlined: true timeseries_index: name: timeseries_index @@ -91,7 +87,5 @@ classes: value: neurodata_type_inc description: Index for timeseries. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml index 01ef5b5..e42c8b3 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml @@ -81,13 +81,11 @@ classes: other files. range: text required: true - multivalued: false session_description: name: session_description description: A description of the experimental session and data in the file. range: text required: true - multivalued: false session_start_time: name: session_start_time description: 'Date and time of the experiment/session start. 
The date is stored @@ -96,7 +94,6 @@ classes: offset. Date accuracy is up to milliseconds.' range: isodatetime required: true - multivalued: false timestamps_reference_time: name: timestamps_reference_time description: 'Date and time corresponding to time zero of all timestamps. @@ -106,7 +103,6 @@ classes: times stored in the file use this time as reference (i.e., time zero).' range: isodatetime required: true - multivalued: false acquisition: name: acquisition description: Data streams recorded from the system, including ephys, ophys, @@ -185,7 +181,6 @@ classes: can exist in the present file or can be linked to a remote library file. range: NWBFile__stimulus required: true - multivalued: false inlined: true inlined_as_list: true general: @@ -207,7 +202,6 @@ classes: should not be created unless there is data to store within them. range: NWBFile__general required: true - multivalued: false inlined: true inlined_as_list: true intervals: @@ -217,16 +211,12 @@ classes: an experiment, or epochs (see epochs subgroup) deriving from analysis of data. range: NWBFile__intervals - required: false - multivalued: false inlined: true inlined_as_list: true units: name: units description: Data about sorted spike units. range: Units - required: false - multivalued: false inlined: true inlined_as_list: false tree_root: true @@ -300,14 +290,10 @@ classes: name: data_collection description: Notes about data collection and analysis. range: text - required: false - multivalued: false experiment_description: name: experiment_description description: General description of the experiment. range: text - required: false - multivalued: false experimenter: name: experimenter description: Name of person(s) who performed the experiment. Can also specify @@ -322,8 +308,6 @@ classes: name: institution description: Institution(s) where experiment was performed. range: text - required: false - multivalued: false keywords: name: keywords description: Terms to search over. 
@@ -337,28 +321,20 @@ classes: name: lab description: Laboratory where experiment was performed. range: text - required: false - multivalued: false notes: name: notes description: Notes about the experiment. range: text - required: false - multivalued: false pharmacology: name: pharmacology description: Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. range: text - required: false - multivalued: false protocol: name: protocol description: Experimental protocol, if applicable. e.g., include IACUC protocol number. range: text - required: false - multivalued: false related_publications: name: related_publications description: Publication information. PMID, DOI, URL, etc. @@ -372,49 +348,36 @@ classes: name: session_id description: Lab-specific ID for the session. range: text - required: false - multivalued: false slices: name: slices description: Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. range: text - required: false - multivalued: false source_script: name: source_script description: Script file or link to public source code used to create this NWB file. range: general__source_script - required: false - multivalued: false inlined: true stimulus: name: stimulus description: Notes about stimuli, such as how and where they were presented. range: text - required: false - multivalued: false surgery: name: surgery description: Narrative description about surgery/surgeries, including date(s) and who performed surgery. range: text - required: false - multivalued: false virus: name: virus description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - required: false - multivalued: false lab_meta_data: name: lab_meta_data description: Place-holder than can be extended so that lab-specific meta-data can be placed in /general. 
range: LabMetaData - required: false multivalued: true inlined: true inlined_as_list: false @@ -432,24 +395,18 @@ classes: description: Information about the animal or person from which the data was measured. range: Subject - required: false - multivalued: false inlined: true inlined_as_list: false extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. range: general__extracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. range: general__intracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true optogenetics: @@ -504,7 +461,6 @@ classes: name: electrode_group description: Physical group of electrodes. range: ElectrodeGroup - required: false multivalued: true inlined: true inlined_as_list: false @@ -512,8 +468,6 @@ classes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes - required: false - multivalued: false inlined: true inlined_as_list: true extracellular_ephys__electrodes: @@ -660,13 +614,10 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - required: false - multivalued: false intracellular_electrode: name: intracellular_electrode description: An intracellular electrode. range: IntracellularElectrode - required: false multivalued: true inlined: true inlined_as_list: false @@ -677,8 +628,6 @@ classes: tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.' 
range: SweepTable - required: false - multivalued: false inlined: true inlined_as_list: false intracellular_recordings: @@ -696,8 +645,6 @@ classes: to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. range: IntracellularRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false simultaneous_recordings: @@ -706,8 +653,6 @@ classes: the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes range: SimultaneousRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false sequential_recordings: @@ -717,8 +662,6 @@ classes: together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence. range: SequentialRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false repetitions: @@ -728,8 +671,6 @@ classes: type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. range: RepetitionsTable - required: false - multivalued: false inlined: true inlined_as_list: false experimental_conditions: @@ -737,8 +678,6 @@ classes: description: A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. range: ExperimentalConditionsTable - required: false - multivalued: false inlined: true inlined_as_list: false NWBFile__intervals: @@ -759,24 +698,18 @@ classes: description: Divisions in time marking experimental stages or sub-divisions of a single recording session. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false trials: name: trials description: Repeated experimental events that have a logical grouping. 
range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false invalid_times: name: invalid_times description: Time intervals that should be removed from analysis. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false time_intervals: @@ -784,7 +717,6 @@ classes: description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals - required: false multivalued: true inlined: true inlined_as_list: false @@ -813,56 +745,38 @@ classes: name: age description: Age of subject. Can be supplied instead of 'date_of_birth'. range: text - required: false - multivalued: false date_of_birth: name: date_of_birth description: Date of birth of subject. Can be supplied instead of 'age'. range: isodatetime - required: false - multivalued: false description: name: description description: Description of subject and where subject came from (e.g., breeder, if animal). range: text - required: false - multivalued: false genotype: name: genotype description: Genetic strain. If absent, assume Wild Type (WT). range: text - required: false - multivalued: false sex: name: sex description: Gender of subject. range: text - required: false - multivalued: false species: name: species description: Species of subject. range: text - required: false - multivalued: false strain: name: strain description: Strain of subject. range: text - required: false - multivalued: false subject_id: name: subject_id description: ID of animal/person used/participating in experiment (lab convention). range: text - required: false - multivalued: false weight: name: weight description: Weight at time of experiment, at time of surgery and at other important times. 
range: text - required: false - multivalued: false tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml index 257b07b..cc89c87 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml @@ -41,15 +41,12 @@ classes: description: Recorded voltage or current. range: PatchClampSeries__data required: true - multivalued: false inlined: true gain: name: gain description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). range: float32 - required: false - multivalued: false electrode: name: electrode annotations: @@ -57,7 +54,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: IntracellularElectrode @@ -74,6 +70,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -99,31 +140,24 @@ classes: identifier: true range: string required: true + bias_current: + name: bias_current + description: Bias current, in amps. + range: float32 + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + range: float32 + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + range: float32 data: name: data description: Recorded voltage. 
range: CurrentClampSeries__data required: true - multivalued: false inlined: true - bias_current: - name: bias_current - description: Bias current, in amps. - range: float32 - required: false - multivalued: false - bridge_balance: - name: bridge_balance - description: Bridge balance, in ohms. - range: float32 - required: false - multivalued: false - capacitance_compensation: - name: capacitance_compensation - description: Capacitance compensation, in farads. - range: float32 - required: false - multivalued: false tree_root: true CurrentClampSeries__data: name: CurrentClampSeries__data @@ -136,6 +170,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -148,8 +227,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IZeroClampSeries: name: IZeroClampSeries description: Voltage data from an intracellular recording when all current and @@ -176,19 +257,16 @@ classes: description: Bias current, in amps, fixed to 0.0. range: float32 required: true - multivalued: false bridge_balance: name: bridge_balance description: Bridge balance, in ohms, fixed to 0.0. range: float32 required: true - multivalued: false capacitance_compensation: name: capacitance_compensation description: Capacitance compensation, in farads, fixed to 0.0. 
range: float32 required: true - multivalued: false tree_root: true CurrentClampStimulusSeries: name: CurrentClampStimulusSeries @@ -205,7 +283,6 @@ classes: description: Stimulus current applied. range: CurrentClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true CurrentClampStimulusSeries__data: @@ -219,6 +296,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -231,8 +353,10 @@ classes: equals_string: amperes value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries: name: VoltageClampSeries description: Current data from an intracellular voltage-clamp recording. A corresponding @@ -245,88 +369,48 @@ classes: identifier: true range: string required: true - data: - name: data - description: Recorded current. - range: VoltageClampSeries__data - required: true - multivalued: false - inlined: true capacitance_fast: name: capacitance_fast description: Fast capacitance, in farads. range: VoltageClampSeries__capacitance_fast - required: false - multivalued: false inlined: true capacitance_slow: name: capacitance_slow description: Slow capacitance, in farads. range: VoltageClampSeries__capacitance_slow - required: false - multivalued: false + inlined: true + data: + name: data + description: Recorded current. 
+ range: VoltageClampSeries__data + required: true inlined: true resistance_comp_bandwidth: name: resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. range: VoltageClampSeries__resistance_comp_bandwidth - required: false - multivalued: false inlined: true resistance_comp_correction: name: resistance_comp_correction description: Resistance compensation correction, in percent. range: VoltageClampSeries__resistance_comp_correction - required: false - multivalued: false inlined: true resistance_comp_prediction: name: resistance_comp_prediction description: Resistance compensation prediction, in percent. range: VoltageClampSeries__resistance_comp_prediction - required: false - multivalued: false inlined: true whole_cell_capacitance_comp: name: whole_cell_capacitance_comp description: Whole cell capacitance compensation, in farads. range: VoltageClampSeries__whole_cell_capacitance_comp - required: false - multivalued: false inlined: true whole_cell_series_resistance_comp: name: whole_cell_series_resistance_comp description: Whole cell series resistance compensation, in ohms. range: VoltageClampSeries__whole_cell_series_resistance_comp - required: false - multivalued: false inlined: true tree_root: true - VoltageClampSeries__data: - name: VoltageClampSeries__data - description: Recorded current. - attributes: - name: - name: name - ifabsent: string(data) - identifier: true - range: string - required: true - equals_string: data - unit: - name: unit - description: Base unit of measurement for working with the data. which is - fixed to 'amperes'. Actual stored values are not necessarily stored in these - units. To access the data in these units, multiply 'data' by 'conversion' - and add 'offset'. 
- ifabsent: string(amperes) - range: text - required: true - equals_string: amperes - value: - name: value - range: AnyType - required: true VoltageClampSeries__capacitance_fast: name: VoltageClampSeries__capacitance_fast description: Fast capacitance, in farads. @@ -371,6 +455,78 @@ classes: name: value range: float32 required: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + ifabsent: string(amperes) + range: text + required: true + equals_string: amperes + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries__resistance_comp_bandwidth: name: VoltageClampSeries__resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. @@ -501,7 +657,6 @@ classes: description: Stimulus voltage applied. range: VoltageClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true VoltageClampStimulusSeries__data: @@ -515,6 +670,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -527,8 +727,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IntracellularElectrode: name: IntracellularElectrode description: An intracellular electrode and its metadata. @@ -543,52 +745,37 @@ classes: name: cell_id description: unique ID of the cell range: text - required: false - multivalued: false description: name: description description: Description of electrode (e.g., whole-cell, sharp, etc.). range: text required: true - multivalued: false filtering: name: filtering description: Electrode specific filtering. range: text - required: false - multivalued: false initial_access_resistance: name: initial_access_resistance description: Initial access resistance. range: text - required: false - multivalued: false location: name: location description: Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. range: text - required: false - multivalued: false resistance: name: resistance description: Electrode resistance, in ohms. range: text - required: false - multivalued: false seal: name: seal description: Information about seal used for recording. range: text - required: false - multivalued: false slice: name: slice description: Information about slice used for recording. range: text - required: false - multivalued: false device: name: device annotations: @@ -596,7 +783,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -615,15 +801,6 @@ classes: identifier: true range: string required: true - sweep_number: - name: sweep_number - description: Sweep number of the PatchClampSeries in that row. 
- array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: uint32 - required: true - multivalued: false series: name: series description: The PatchClampSeries with the sweep number in that row. @@ -646,8 +823,16 @@ classes: description: Index for series. range: VectorIndex required: true - multivalued: false inlined: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. + array: + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: uint32 + required: true + multivalued: false tree_root: true IntracellularElectrodesTable: name: IntracellularElectrodesTable @@ -707,7 +892,6 @@ classes: recording (rows). range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularResponsesTable: @@ -740,7 +924,6 @@ classes: recording (rows) range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularRecordingsTable: @@ -782,15 +965,6 @@ classes: description: Table for storing intracellular electrode related metadata. range: IntracellularElectrodesTable required: true - multivalued: false - inlined: true - inlined_as_list: false - stimuli: - name: stimuli - description: Table for storing intracellular stimulus related metadata. - range: IntracellularStimuliTable - required: true - multivalued: false inlined: true inlined_as_list: false responses: @@ -798,7 +972,13 @@ classes: description: Table for storing intracellular response related metadata. range: IntracellularResponsesTable required: true - multivalued: false + inlined: true + inlined_as_list: false + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + range: IntracellularStimuliTable + required: true inlined: true inlined_as_list: false tree_root: true @@ -822,7 +1002,6 @@ classes: table. 
range: SimultaneousRecordingsTable__recordings required: true - multivalued: false inlined: true recordings_index: name: recordings_index @@ -836,7 +1015,6 @@ classes: description: Index dataset for the recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true SimultaneousRecordingsTable__recordings: @@ -881,7 +1059,6 @@ classes: table. range: SequentialRecordingsTable__simultaneous_recordings required: true - multivalued: false inlined: true simultaneous_recordings_index: name: simultaneous_recordings_index @@ -895,7 +1072,6 @@ classes: description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true - multivalued: false inlined: true stimulus_type: name: stimulus_type @@ -949,7 +1125,6 @@ classes: table. range: RepetitionsTable__sequential_recordings required: true - multivalued: false inlined: true sequential_recordings_index: name: sequential_recordings_index @@ -963,7 +1138,6 @@ classes: description: Index dataset for the sequential_recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true RepetitionsTable__sequential_recordings: @@ -1005,7 +1179,6 @@ classes: description: A reference to one or more rows in the RepetitionsTable table. range: ExperimentalConditionsTable__repetitions required: true - multivalued: false inlined: true repetitions_index: name: repetitions_index @@ -1019,7 +1192,6 @@ classes: description: Index dataset for the repetitions column. 
range: VectorIndex required: true - multivalued: false inlined: true tree_root: true ExperimentalConditionsTable__repetitions: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml index dd4d2f4..1d9c427 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.image.yaml @@ -91,21 +91,9 @@ classes: name: data description: Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. - range: numeric + range: ImageSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - alias: z + inlined: true dimension: name: dimension description: Number of pixels on x, y, (and z) axes. @@ -123,8 +111,6 @@ classes: used if the image is stored in another NWB file and that file is linked to this file. range: ImageSeries__external_file - required: false - multivalued: false inlined: true format: name: format @@ -132,22 +118,98 @@ classes: contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + ifabsent: string(raw) range: text - required: false - multivalued: false device: name: device annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Device - range: string tree_root: true + ImageSeries__data: + name: ImageSeries__data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. 
+ attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. 
Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z ImageSeries__external_file: name: ImageSeries__external_file description: Paths to one or more external file(s). The field is only present @@ -206,7 +268,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries @@ -226,12 +287,16 @@ classes: identifier: true range: string required: true + data: + name: data + description: Images presented to subject, either grayscale or RGB + range: OpticalSeries__data + required: true + inlined: true distance: name: distance description: Distance from camera/monitor to target/eye. range: float32 - required: false - multivalued: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -247,12 +312,78 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 - data: - name: data - description: Images presented to subject, either grayscale or RGB - range: numeric + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. 
+ range: text + tree_root: true + OpticalSeries__data: + name: OpticalSeries__data + description: Images presented to subject, either grayscale or RGB + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string required: true - multivalued: false + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. 
Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + value: + name: value + range: numeric any_of: - array: dimensions: @@ -266,14 +397,6 @@ classes: - alias: y - alias: r_g_b exact_cardinality: 3 - orientation: - name: orientation - description: Description of image relative to some reference frame (e.g., - which way is up). Must also specify frame of reference. - range: text - required: false - multivalued: false - tree_root: true IndexSeries: name: IndexSeries description: Stores indices to image frames stored in an ImageSeries. The purpose @@ -294,20 +417,15 @@ classes: name: data description: Index of the image (using zero-indexing) in the linked Images object. 
- array: - dimensions: - - alias: num_times - range: uint32 + range: IndexSeries__data required: true - multivalued: false + inlined: true indexed_timeseries: name: indexed_timeseries annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: ImageSeries @@ -318,10 +436,62 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Images - range: string tree_root: true + IndexSeries__data: + name: IndexSeries__data + description: Index of the image (using zero-indexing) in the linked Images object. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: This field is unused by IndexSeries. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: This field is unused by IndexSeries. + range: float32 + required: false + resolution: + name: resolution + description: This field is unused by IndexSeries. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: This field is unused by IndexSeries and has the value N/A. 
+ ifabsent: string(N/A) + range: text + required: true + equals_string: N/A + value: + name: value + array: + dimensions: + - alias: num_times + range: uint32 diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml index 5bfeb44..8cb7a86 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.misc.yaml @@ -38,7 +38,6 @@ classes: description: Values of each feature at each time. range: AbstractFeatureSeries__data required: true - multivalued: false inlined: true feature_units: name: feature_units @@ -70,6 +69,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Since there can be different units for different features, store @@ -105,13 +149,79 @@ classes: data: name: data description: Annotations made during an experiment. + range: AnnotationSeries__data + required: true + inlined: true + tree_root: true + AnnotationSeries__data: + name: AnnotationSeries__data + description: Annotations made during an experiment. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. 
Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: text - required: true - multivalued: false - tree_root: true IntervalSeries: name: IntervalSeries description: Stores intervals of data. The timestamps field stores the beginning @@ -131,13 +241,79 @@ classes: data: name: data description: Use values >0 if interval started, <0 if interval ended. + range: IntervalSeries__data + required: true + inlined: true + tree_root: true + IntervalSeries__data: + name: IntervalSeries__data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: int8 - required: true - multivalued: false - tree_root: true DecompositionSeries: name: DecompositionSeries description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -153,14 +329,12 @@ classes: description: Data decomposed into frequency bands. range: DecompositionSeries__data required: true - multivalued: false inlined: true metric: name: metric description: The metric used, e.g. phase, amplitude, power. 
range: text required: true - multivalued: false source_channels: name: source_channels annotations: @@ -173,8 +347,6 @@ classes: description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion - required: false - multivalued: false inlined: true bands: name: bands @@ -182,7 +354,6 @@ classes: from. There should be one row in this table for each band. range: DecompositionSeries__bands required: true - multivalued: false inlined: true inlined_as_list: true source_timeseries: @@ -191,8 +362,6 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: TimeSeries @@ -209,6 +378,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -289,63 +503,13 @@ classes: identifier: true range: string required: true - spike_times_index: - name: spike_times_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the spike_times dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - spike_times: - name: spike_times - description: Spike times for each unit. 
- range: Units__spike_times - required: false - multivalued: false - inlined: true - obs_intervals_index: - name: obs_intervals_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the obs_intervals dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - obs_intervals: - name: obs_intervals - description: Observation intervals for each unit. + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. array: - dimensions: - - alias: num_intervals - - alias: start_end - exact_cardinality: 2 - range: float64 - required: false - multivalued: false - electrodes_index: - name: electrodes_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into electrodes. - range: VectorIndex + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: ElectrodeGroup required: false multivalued: false inlined: true @@ -360,51 +524,69 @@ classes: value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion - required: false - multivalued: false inlined: true - electrode_group: - name: electrode_group - description: Electrode group that each spike unit came from. + electrodes_index: + name: electrodes_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into electrodes. + range: VectorIndex + inlined: true + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. 
array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: ElectrodeGroup + dimensions: + - alias: num_intervals + - alias: start_end + exact_cardinality: 2 + range: float64 required: false multivalued: false + obs_intervals_index: + name: obs_intervals_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the obs_intervals dataset. + range: VectorIndex + inlined: true + spike_times: + name: spike_times + description: Spike times for each unit. + range: Units__spike_times + inlined: true + spike_times_index: + name: spike_times_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the spike_times dataset. + range: VectorIndex inlined: true waveform_mean: name: waveform_mean description: Spike waveform mean for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_mean + inlined: true waveform_sd: name: waveform_sd description: Spike waveform standard deviation for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_sd + inlined: true waveforms: name: waveforms description: Individual waveforms for each spike on each electrode. This is @@ -430,13 +612,8 @@ classes: order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. 
- array: - dimensions: - - alias: num_waveforms - - alias: num_samples - range: numeric - required: false - multivalued: false + range: Units__waveforms + inlined: true waveforms_index: name: waveforms_index annotations: @@ -449,8 +626,6 @@ classes: description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true waveforms_index_index: name: waveforms_index_index @@ -464,8 +639,6 @@ classes: description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true Units__spike_times: @@ -489,3 +662,97 @@ classes: for the spike time to be between samples. range: float64 required: false + Units__waveform_mean: + name: Units__waveform_mean + description: Spike waveform mean for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_mean) + identifier: true + range: string + required: true + equals_string: waveform_mean + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveform_sd: + name: Units__waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_sd) + identifier: true + range: string + required: true + equals_string: waveform_sd + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveforms: + name: Units__waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. 
+ is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveforms) + identifier: true + range: string + required: true + equals_string: waveforms + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml index 8c6b076..99b02f7 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml @@ -27,12 +27,9 @@ classes: data: name: data description: Applied power for optogenetic stimulus, in watts. - array: - dimensions: - - alias: num_times - range: numeric + range: OptogeneticSeries__data required: true - multivalued: false + inlined: true site: name: site annotations: @@ -40,12 +37,80 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: OptogeneticStimulusSite - range: string tree_root: true + OptogeneticSeries__data: + name: OptogeneticSeries__data + description: Applied power for optogenetic stimulus, in watts. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. 
+ ifabsent: string(watts) + range: text + required: true + equals_string: watts + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric OptogeneticStimulusSite: name: OptogeneticStimulusSite description: A site of optogenetic stimulation. @@ -61,13 +126,11 @@ classes: description: Description of stimulation site. range: text required: true - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false location: name: location description: Location of the stimulation site. Specify the area, layer, comments @@ -75,7 +138,6 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false device: name: device annotations: @@ -83,7 +145,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml index 17bb442..3658597 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml @@ -60,7 +60,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -80,17 +79,9 @@ classes: data: name: data description: Signals from ROIs. - range: numeric + range: RoiResponseSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_rois + inlined: true rois: name: rois annotations: @@ -104,9 +95,82 @@ classes: on the ROIs stored in this timeseries. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true + RoiResponseSeries__data: + name: RoiResponseSeries__data + description: Signals from ROIs. 
+ attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. 
Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois DfOverF: name: DfOverF description: dF/F information about a region of interest (ROI). Storage hierarchy @@ -182,6 +246,13 @@ classes: - alias: num_x - alias: num_y - alias: num_z + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + range: PlaneSegmentation__pixel_mask + inlined: true pixel_mask_index: name: pixel_mask_index annotations: @@ -193,17 +264,13 @@ classes: value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex - required: false - multivalued: false inlined: true - pixel_mask: - name: pixel_mask - description: 'Pixel masks for each ROI: a list of indices and weights for - the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' - range: PlaneSegmentation__pixel_mask - required: false - multivalued: false + range: PlaneSegmentation__voxel_mask inlined: true voxel_mask_index: name: voxel_mask_index @@ -216,17 +283,6 @@ classes: value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex - required: false - multivalued: false - inlined: true - voxel_mask: - name: voxel_mask - description: 'Voxel masks for each ROI: a list of indices and weights for - the ROI. Voxel masks are concatenated and parsing of this dataset is maintained - by the PlaneSegmentation' - range: PlaneSegmentation__voxel_mask - required: false - multivalued: false inlined: true reference_images: name: reference_images @@ -243,7 +299,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -269,24 +324,18 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Pixel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the pixel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false PlaneSegmentation__voxel_mask: name: PlaneSegmentation__voxel_mask description: 'Voxel masks for each ROI: a list of indices and weights for the @@ -307,32 +356,24 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Voxel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false z: name: z description: Voxel z-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the voxel. 
array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ImagingPlane: name: ImagingPlane description: An imaging plane and its metadata. @@ -347,27 +388,21 @@ classes: name: description description: Description of the imaging plane. range: text - required: false - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false imaging_rate: name: imaging_rate description: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. range: float32 - required: false - multivalued: false indicator: name: indicator description: Calcium indicator. range: text required: true - multivalued: false location: name: location description: Location of the imaging plane. Specify the area, layer, comments @@ -375,15 +410,12 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false manifold: name: manifold description: DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. range: ImagingPlane__manifold - required: false - multivalued: false inlined: true origin_coords: name: origin_coords @@ -391,8 +423,6 @@ classes: 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). range: ImagingPlane__origin_coords - required: false - multivalued: false inlined: true grid_spacing: name: grid_spacing @@ -400,8 +430,6 @@ classes: in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. range: ImagingPlane__grid_spacing - required: false - multivalued: false inlined: true reference_frame: name: reference_frame @@ -423,8 +451,6 @@ classes: axis (larger index = more rightward). 
Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." range: text - required: false - multivalued: false optical_channel: name: optical_channel description: An optical channel used to record from an imaging plane. @@ -440,7 +466,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -573,13 +598,11 @@ classes: description: Description or other notes about the channel. range: text required: true - multivalued: false emission_lambda: name: emission_lambda description: Emission wavelength for channel, in nm. range: float32 required: true - multivalued: false tree_root: true MotionCorrection: name: MotionCorrection @@ -610,7 +633,6 @@ classes: description: Image stack with frames shifted to the common coordinates. range: ImageSeries required: true - multivalued: false inlined: true inlined_as_list: false xy_translation: @@ -619,7 +641,6 @@ classes: coordinates, for example, to align each frame to a reference image. range: TimeSeries required: true - multivalued: false inlined: true inlined_as_list: false original: @@ -629,7 +650,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml index 26b6ed6..d3d25ee 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml @@ -37,30 +37,24 @@ classes: description: Phase response to stimulus on the first measured axis. range: ImagingRetinotopy__axis_1_phase_map required: true - multivalued: false inlined: true axis_1_power_map: name: axis_1_power_map description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. 
range: ImagingRetinotopy__axis_1_power_map - required: false - multivalued: false inlined: true axis_2_phase_map: name: axis_2_phase_map description: Phase response to stimulus on the second measured axis. range: ImagingRetinotopy__axis_2_phase_map required: true - multivalued: false inlined: true axis_2_power_map: name: axis_2_power_map description: Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_2_power_map - required: false - multivalued: false inlined: true axis_descriptions: name: axis_descriptions @@ -79,16 +73,12 @@ classes: description: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].' range: ImagingRetinotopy__focal_depth_image - required: false - multivalued: false inlined: true sign_map: name: sign_map description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. 
range: ImagingRetinotopy__sign_map - required: false - multivalued: false inlined: true vasculature_image: name: vasculature_image @@ -96,7 +86,6 @@ classes: [rows][columns]' range: ImagingRetinotopy__vasculature_image required: true - multivalued: false inlined: true tree_root: true ImagingRetinotopy__axis_1_phase_map: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml index 9aeec32..f7f96c9 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml @@ -47,7 +47,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -56,7 +55,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to @@ -64,7 +62,6 @@ classes: exact_number_dimensions: 1 range: TimeSeries required: true - multivalued: false inlined: true tree_root: true Image: @@ -189,7 +186,6 @@ classes: external file. range: TimeSeries__data required: true - multivalued: false inlined: true starting_time: name: starting_time @@ -197,8 +193,6 @@ classes: uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. range: TimeSeries__starting_time - required: false - multivalued: false inlined: true timestamps: name: timestamps @@ -241,8 +235,6 @@ classes: external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. 
range: TimeSeries__sync - required: false - multivalued: false inlined: true inlined_as_list: true tree_root: true @@ -429,7 +421,5 @@ classes: and only once, so the dataset should have the same length as the number of images. range: ImageReferences - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml index 9d96389..47c9e78 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml @@ -38,14 +38,11 @@ classes: reference frame. range: SpatialSeries__data required: true - multivalued: false inlined: true reference_frame: name: reference_frame description: Description defining what exactly 'straight-ahead' means. range: text - required: false - multivalued: false tree_root: true SpatialSeries__data: name: SpatialSeries__data @@ -59,6 +56,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml index 6fba341..2434030 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml @@ -39,40 +39,6 @@ classes: about the filter properties as possible. range: text required: false - data: - name: data - description: Recorded voltage data. 
- range: numeric - required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_channels - - array: - dimensions: - - alias: num_times - - alias: num_channels - - alias: num_samples - electrodes: - name: electrodes - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: DynamicTableRegion pointer to the electrodes that this time series - was generated from. - range: DynamicTableRegion - required: true - multivalued: false - inlined: true channel_conversion: name: channel_conversion description: Channel-specific conversion factor. Multiply the data in the @@ -90,7 +56,109 @@ classes: range: float32 required: false multivalued: false + data: + name: data + description: Recorded voltage data. + range: ElectricalSeries__data + required: true + inlined: true + electrodes: + name: electrodes + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + range: DynamicTableRegion + required: true + inlined: true tree_root: true + ElectricalSeries__data: + name: ElectricalSeries__data + description: Recorded voltage data. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. 
This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. 
To access the data in these units, multiply 'data' by 'conversion', + followed by 'channel_conversion' (if present), and then add 'offset'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples SpikeEventSeries: name: SpikeEventSeries description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold @@ -111,19 +179,9 @@ classes: data: name: data description: Spike waveforms. - range: numeric + range: SpikeEventSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_events - - alias: num_samples - - array: - dimensions: - - alias: num_events - - alias: num_channels - - alias: num_samples + inlined: true timestamps: name: timestamps description: Timestamps for samples stored in data, in seconds, relative to @@ -137,6 +195,82 @@ classes: required: true multivalued: false tree_root: true + SpikeEventSeries__data: + name: SpikeEventSeries__data + description: Spike waveforms. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. 
It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples FeatureExtraction: name: FeatureExtraction description: Features, such as PC1 and PC2, that are extracted from signals stored @@ -192,7 +326,6 @@ classes: was generated from. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true EventDetection: @@ -212,7 +345,6 @@ classes: or dV/dT threshold, as well as relevant values. range: text required: true - multivalued: false source_idx: name: source_idx description: Indices (zero-based) into source ElectricalSeries::data array @@ -241,7 +373,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ElectricalSeries @@ -323,8 +454,6 @@ classes: name: position description: stereotaxic or common framework coordinates range: ElectrodeGroup__position - required: false - multivalued: false inlined: true device: name: device @@ -333,7 +462,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -356,24 +484,18 @@ classes: array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false y: name: y description: y coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false z: name: z description: z coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ClusterWaveforms: name: ClusterWaveforms description: DEPRECATED The mean waveform shape, including standard deviation, @@ -395,7 +517,6 @@ classes: description: Filtering applied to data before generating mean/sd range: text required: true - multivalued: false waveform_mean: name: waveform_mean description: The mean waveform for each 
cluster, using the same indices for @@ -427,7 +548,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Clustering @@ -451,7 +571,6 @@ classes: clusters curated using Klusters, etc) range: text required: true - multivalued: false num: name: num description: Cluster number of each event diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml index 0a9685b..cbe9de3 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml @@ -63,8 +63,6 @@ classes: value: neurodata_type_inc description: Index for tags. range: VectorIndex - required: false - multivalued: false inlined: true timeseries: name: timeseries @@ -77,8 +75,6 @@ classes: value: neurodata_type_inc description: An index into a TimeSeries object. range: TimeSeriesReferenceVectorData - required: false - multivalued: false inlined: true timeseries_index: name: timeseries_index @@ -91,7 +87,5 @@ classes: value: neurodata_type_inc description: Index for timeseries. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml index 481256f..1a0cf34 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml @@ -81,13 +81,11 @@ classes: other files. range: text required: true - multivalued: false session_description: name: session_description description: A description of the experimental session and data in the file. 
range: text required: true - multivalued: false session_start_time: name: session_start_time description: 'Date and time of the experiment/session start. The date is stored @@ -96,7 +94,6 @@ classes: offset. Date accuracy is up to milliseconds.' range: isodatetime required: true - multivalued: false timestamps_reference_time: name: timestamps_reference_time description: 'Date and time corresponding to time zero of all timestamps. @@ -106,7 +103,6 @@ classes: times stored in the file use this time as reference (i.e., time zero).' range: isodatetime required: true - multivalued: false acquisition: name: acquisition description: Data streams recorded from the system, including ephys, ophys, @@ -185,7 +181,6 @@ classes: can exist in the present file or can be linked to a remote library file. range: NWBFile__stimulus required: true - multivalued: false inlined: true inlined_as_list: true general: @@ -207,7 +202,6 @@ classes: should not be created unless there is data to store within them. range: NWBFile__general required: true - multivalued: false inlined: true inlined_as_list: true intervals: @@ -217,16 +211,12 @@ classes: an experiment, or epochs (see epochs subgroup) deriving from analysis of data. range: NWBFile__intervals - required: false - multivalued: false inlined: true inlined_as_list: true units: name: units description: Data about sorted spike units. range: Units - required: false - multivalued: false inlined: true inlined_as_list: false tree_root: true @@ -300,14 +290,10 @@ classes: name: data_collection description: Notes about data collection and analysis. range: text - required: false - multivalued: false experiment_description: name: experiment_description description: General description of the experiment. range: text - required: false - multivalued: false experimenter: name: experimenter description: Name of person(s) who performed the experiment. 
Can also specify @@ -322,8 +308,6 @@ classes: name: institution description: Institution(s) where experiment was performed. range: text - required: false - multivalued: false keywords: name: keywords description: Terms to search over. @@ -337,28 +321,20 @@ classes: name: lab description: Laboratory where experiment was performed. range: text - required: false - multivalued: false notes: name: notes description: Notes about the experiment. range: text - required: false - multivalued: false pharmacology: name: pharmacology description: Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. range: text - required: false - multivalued: false protocol: name: protocol description: Experimental protocol, if applicable. e.g., include IACUC protocol number. range: text - required: false - multivalued: false related_publications: name: related_publications description: Publication information. PMID, DOI, URL, etc. @@ -372,49 +348,36 @@ classes: name: session_id description: Lab-specific ID for the session. range: text - required: false - multivalued: false slices: name: slices description: Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. range: text - required: false - multivalued: false source_script: name: source_script description: Script file or link to public source code used to create this NWB file. range: general__source_script - required: false - multivalued: false inlined: true stimulus: name: stimulus description: Notes about stimuli, such as how and where they were presented. range: text - required: false - multivalued: false surgery: name: surgery description: Narrative description about surgery/surgeries, including date(s) and who performed surgery. 
range: text - required: false - multivalued: false virus: name: virus description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - required: false - multivalued: false lab_meta_data: name: lab_meta_data description: Place-holder than can be extended so that lab-specific meta-data can be placed in /general. range: LabMetaData - required: false multivalued: true inlined: true inlined_as_list: false @@ -432,24 +395,18 @@ classes: description: Information about the animal or person from which the data was measured. range: Subject - required: false - multivalued: false inlined: true inlined_as_list: false extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. range: general__extracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. range: general__intracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true optogenetics: @@ -504,7 +461,6 @@ classes: name: electrode_group description: Physical group of electrodes. range: ElectrodeGroup - required: false multivalued: true inlined: true inlined_as_list: false @@ -512,8 +468,6 @@ classes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes - required: false - multivalued: false inlined: true inlined_as_list: true extracellular_ephys__electrodes: @@ -660,13 +614,10 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - required: false - multivalued: false intracellular_electrode: name: intracellular_electrode description: An intracellular electrode. 
range: IntracellularElectrode - required: false multivalued: true inlined: true inlined_as_list: false @@ -677,8 +628,6 @@ classes: tabels. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.' range: SweepTable - required: false - multivalued: false inlined: true inlined_as_list: false intracellular_recordings: @@ -696,8 +645,6 @@ classes: to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. range: IntracellularRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false simultaneous_recordings: @@ -706,8 +653,6 @@ classes: the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes range: SimultaneousRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false sequential_recordings: @@ -717,8 +662,6 @@ classes: together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence. range: SequentialRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false repetitions: @@ -728,8 +671,6 @@ classes: type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. range: RepetitionsTable - required: false - multivalued: false inlined: true inlined_as_list: false experimental_conditions: @@ -737,8 +678,6 @@ classes: description: A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. range: ExperimentalConditionsTable - required: false - multivalued: false inlined: true inlined_as_list: false NWBFile__intervals: @@ -759,24 +698,18 @@ classes: description: Divisions in time marking experimental stages or sub-divisions of a single recording session. 
range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false trials: name: trials description: Repeated experimental events that have a logical grouping. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false invalid_times: name: invalid_times description: Time intervals that should be removed from analysis. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false time_intervals: @@ -784,7 +717,6 @@ classes: description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals - required: false multivalued: true inlined: true inlined_as_list: false @@ -813,59 +745,41 @@ classes: name: age description: Age of subject. Can be supplied instead of 'date_of_birth'. range: Subject__age - required: false - multivalued: false inlined: true date_of_birth: name: date_of_birth description: Date of birth of subject. Can be supplied instead of 'age'. range: isodatetime - required: false - multivalued: false description: name: description description: Description of subject and where subject came from (e.g., breeder, if animal). range: text - required: false - multivalued: false genotype: name: genotype description: Genetic strain. If absent, assume Wild Type (WT). range: text - required: false - multivalued: false sex: name: sex description: Gender of subject. range: text - required: false - multivalued: false species: name: species description: Species of subject. range: text - required: false - multivalued: false strain: name: strain description: Strain of subject. range: text - required: false - multivalued: false subject_id: name: subject_id description: ID of animal/person used/participating in experiment (lab convention). range: text - required: false - multivalued: false weight: name: weight description: Weight at time of experiment, at time of surgery and at other important times. 
range: text - required: false - multivalued: false tree_root: true Subject__age: name: Subject__age diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml index 140e8c8..95b2598 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml @@ -41,15 +41,12 @@ classes: description: Recorded voltage or current. range: PatchClampSeries__data required: true - multivalued: false inlined: true gain: name: gain description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). range: float32 - required: false - multivalued: false electrode: name: electrode annotations: @@ -57,7 +54,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: IntracellularElectrode @@ -74,6 +70,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -99,31 +140,24 @@ classes: identifier: true range: string required: true + bias_current: + name: bias_current + description: Bias current, in amps. + range: float32 + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + range: float32 + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + range: float32 data: name: data description: Recorded voltage. 
range: CurrentClampSeries__data required: true - multivalued: false inlined: true - bias_current: - name: bias_current - description: Bias current, in amps. - range: float32 - required: false - multivalued: false - bridge_balance: - name: bridge_balance - description: Bridge balance, in ohms. - range: float32 - required: false - multivalued: false - capacitance_compensation: - name: capacitance_compensation - description: Capacitance compensation, in farads. - range: float32 - required: false - multivalued: false tree_root: true CurrentClampSeries__data: name: CurrentClampSeries__data @@ -136,6 +170,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -148,8 +227,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IZeroClampSeries: name: IZeroClampSeries description: Voltage data from an intracellular recording when all current and @@ -176,19 +257,16 @@ classes: description: Bias current, in amps, fixed to 0.0. range: float32 required: true - multivalued: false bridge_balance: name: bridge_balance description: Bridge balance, in ohms, fixed to 0.0. range: float32 required: true - multivalued: false capacitance_compensation: name: capacitance_compensation description: Capacitance compensation, in farads, fixed to 0.0. 
range: float32 required: true - multivalued: false tree_root: true CurrentClampStimulusSeries: name: CurrentClampStimulusSeries @@ -205,7 +283,6 @@ classes: description: Stimulus current applied. range: CurrentClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true CurrentClampStimulusSeries__data: @@ -219,6 +296,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -231,8 +353,10 @@ classes: equals_string: amperes value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries: name: VoltageClampSeries description: Current data from an intracellular voltage-clamp recording. A corresponding @@ -245,88 +369,48 @@ classes: identifier: true range: string required: true - data: - name: data - description: Recorded current. - range: VoltageClampSeries__data - required: true - multivalued: false - inlined: true capacitance_fast: name: capacitance_fast description: Fast capacitance, in farads. range: VoltageClampSeries__capacitance_fast - required: false - multivalued: false inlined: true capacitance_slow: name: capacitance_slow description: Slow capacitance, in farads. range: VoltageClampSeries__capacitance_slow - required: false - multivalued: false + inlined: true + data: + name: data + description: Recorded current. 
+ range: VoltageClampSeries__data + required: true inlined: true resistance_comp_bandwidth: name: resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. range: VoltageClampSeries__resistance_comp_bandwidth - required: false - multivalued: false inlined: true resistance_comp_correction: name: resistance_comp_correction description: Resistance compensation correction, in percent. range: VoltageClampSeries__resistance_comp_correction - required: false - multivalued: false inlined: true resistance_comp_prediction: name: resistance_comp_prediction description: Resistance compensation prediction, in percent. range: VoltageClampSeries__resistance_comp_prediction - required: false - multivalued: false inlined: true whole_cell_capacitance_comp: name: whole_cell_capacitance_comp description: Whole cell capacitance compensation, in farads. range: VoltageClampSeries__whole_cell_capacitance_comp - required: false - multivalued: false inlined: true whole_cell_series_resistance_comp: name: whole_cell_series_resistance_comp description: Whole cell series resistance compensation, in ohms. range: VoltageClampSeries__whole_cell_series_resistance_comp - required: false - multivalued: false inlined: true tree_root: true - VoltageClampSeries__data: - name: VoltageClampSeries__data - description: Recorded current. - attributes: - name: - name: name - ifabsent: string(data) - identifier: true - range: string - required: true - equals_string: data - unit: - name: unit - description: Base unit of measurement for working with the data. which is - fixed to 'amperes'. Actual stored values are not necessarily stored in these - units. To access the data in these units, multiply 'data' by 'conversion' - and add 'offset'. 
- ifabsent: string(amperes) - range: text - required: true - equals_string: amperes - value: - name: value - range: AnyType - required: true VoltageClampSeries__capacitance_fast: name: VoltageClampSeries__capacitance_fast description: Fast capacitance, in farads. @@ -371,6 +455,78 @@ classes: name: value range: float32 required: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + ifabsent: string(amperes) + range: text + required: true + equals_string: amperes + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries__resistance_comp_bandwidth: name: VoltageClampSeries__resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. @@ -501,7 +657,6 @@ classes: description: Stimulus voltage applied. range: VoltageClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true VoltageClampStimulusSeries__data: @@ -515,6 +670,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -527,8 +727,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IntracellularElectrode: name: IntracellularElectrode description: An intracellular electrode and its metadata. @@ -543,52 +745,37 @@ classes: name: cell_id description: unique ID of the cell range: text - required: false - multivalued: false description: name: description description: Description of electrode (e.g., whole-cell, sharp, etc.). range: text required: true - multivalued: false filtering: name: filtering description: Electrode specific filtering. range: text - required: false - multivalued: false initial_access_resistance: name: initial_access_resistance description: Initial access resistance. range: text - required: false - multivalued: false location: name: location description: Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. range: text - required: false - multivalued: false resistance: name: resistance description: Electrode resistance, in ohms. range: text - required: false - multivalued: false seal: name: seal description: Information about seal used for recording. range: text - required: false - multivalued: false slice: name: slice description: Information about slice used for recording. range: text - required: false - multivalued: false device: name: device annotations: @@ -596,7 +783,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -615,15 +801,6 @@ classes: identifier: true range: string required: true - sweep_number: - name: sweep_number - description: Sweep number of the PatchClampSeries in that row. 
- array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: uint32 - required: true - multivalued: false series: name: series description: The PatchClampSeries with the sweep number in that row. @@ -646,8 +823,16 @@ classes: description: Index for series. range: VectorIndex required: true - multivalued: false inlined: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. + array: + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: uint32 + required: true + multivalued: false tree_root: true IntracellularElectrodesTable: name: IntracellularElectrodesTable @@ -707,7 +892,6 @@ classes: recording (rows). range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularResponsesTable: @@ -740,7 +924,6 @@ classes: recording (rows) range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularRecordingsTable: @@ -782,15 +965,6 @@ classes: description: Table for storing intracellular electrode related metadata. range: IntracellularElectrodesTable required: true - multivalued: false - inlined: true - inlined_as_list: false - stimuli: - name: stimuli - description: Table for storing intracellular stimulus related metadata. - range: IntracellularStimuliTable - required: true - multivalued: false inlined: true inlined_as_list: false responses: @@ -798,7 +972,13 @@ classes: description: Table for storing intracellular response related metadata. range: IntracellularResponsesTable required: true - multivalued: false + inlined: true + inlined_as_list: false + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + range: IntracellularStimuliTable + required: true inlined: true inlined_as_list: false tree_root: true @@ -822,7 +1002,6 @@ classes: table. 
range: SimultaneousRecordingsTable__recordings required: true - multivalued: false inlined: true recordings_index: name: recordings_index @@ -836,7 +1015,6 @@ classes: description: Index dataset for the recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true SimultaneousRecordingsTable__recordings: @@ -881,7 +1059,6 @@ classes: table. range: SequentialRecordingsTable__simultaneous_recordings required: true - multivalued: false inlined: true simultaneous_recordings_index: name: simultaneous_recordings_index @@ -895,7 +1072,6 @@ classes: description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true - multivalued: false inlined: true stimulus_type: name: stimulus_type @@ -949,7 +1125,6 @@ classes: table. range: RepetitionsTable__sequential_recordings required: true - multivalued: false inlined: true sequential_recordings_index: name: sequential_recordings_index @@ -963,7 +1138,6 @@ classes: description: Index dataset for the sequential_recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true RepetitionsTable__sequential_recordings: @@ -1005,7 +1179,6 @@ classes: description: A reference to one or more rows in the RepetitionsTable table. range: ExperimentalConditionsTable__repetitions required: true - multivalued: false inlined: true repetitions_index: name: repetitions_index @@ -1019,7 +1192,6 @@ classes: description: Index dataset for the repetitions column. 
range: VectorIndex required: true - multivalued: false inlined: true tree_root: true ExperimentalConditionsTable__repetitions: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml index 4406284..4da87da 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml @@ -91,21 +91,9 @@ classes: name: data description: Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. - range: numeric + range: ImageSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - alias: z + inlined: true dimension: name: dimension description: Number of pixels on x, y, (and z) axes. @@ -123,8 +111,6 @@ classes: used if the image is stored in another NWB file and that file is linked to this file. range: ImageSeries__external_file - required: false - multivalued: false inlined: true format: name: format @@ -132,22 +118,98 @@ classes: contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + ifabsent: string(raw) range: text - required: false - multivalued: false device: name: device annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Device - range: string tree_root: true + ImageSeries__data: + name: ImageSeries__data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. 
+ attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. 
Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z ImageSeries__external_file: name: ImageSeries__external_file description: Paths to one or more external file(s). The field is only present @@ -206,7 +268,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries @@ -226,12 +287,16 @@ classes: identifier: true range: string required: true + data: + name: data + description: Images presented to subject, either grayscale or RGB + range: OpticalSeries__data + required: true + inlined: true distance: name: distance description: Distance from camera/monitor to target/eye. range: float32 - required: false - multivalued: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -247,12 +312,78 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 - data: - name: data - description: Images presented to subject, either grayscale or RGB - range: numeric + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. 
+ range: text + tree_root: true + OpticalSeries__data: + name: OpticalSeries__data + description: Images presented to subject, either grayscale or RGB + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string required: true - multivalued: false + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. 
Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + value: + name: value + range: numeric any_of: - array: dimensions: @@ -266,14 +397,6 @@ classes: - alias: y - alias: r_g_b exact_cardinality: 3 - orientation: - name: orientation - description: Description of image relative to some reference frame (e.g., - which way is up). Must also specify frame of reference. - range: text - required: false - multivalued: false - tree_root: true IndexSeries: name: IndexSeries description: Stores indices to image frames stored in an ImageSeries. The purpose @@ -294,20 +417,15 @@ classes: name: data description: Index of the image (using zero-indexing) in the linked Images object. 
- array: - dimensions: - - alias: num_times - range: uint32 + range: IndexSeries__data required: true - multivalued: false + inlined: true indexed_timeseries: name: indexed_timeseries annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: ImageSeries @@ -318,10 +436,62 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Images - range: string tree_root: true + IndexSeries__data: + name: IndexSeries__data + description: Index of the image (using zero-indexing) in the linked Images object. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: This field is unused by IndexSeries. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: This field is unused by IndexSeries. + range: float32 + required: false + resolution: + name: resolution + description: This field is unused by IndexSeries. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: This field is unused by IndexSeries and has the value N/A. 
+ ifabsent: string(N/A) + range: text + required: true + equals_string: N/A + value: + name: value + array: + dimensions: + - alias: num_times + range: uint32 diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml index ced8985..021044b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml @@ -38,7 +38,6 @@ classes: description: Values of each feature at each time. range: AbstractFeatureSeries__data required: true - multivalued: false inlined: true feature_units: name: feature_units @@ -70,6 +69,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Since there can be different units for different features, store @@ -105,13 +149,79 @@ classes: data: name: data description: Annotations made during an experiment. + range: AnnotationSeries__data + required: true + inlined: true + tree_root: true + AnnotationSeries__data: + name: AnnotationSeries__data + description: Annotations made during an experiment. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. 
Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: text - required: true - multivalued: false - tree_root: true IntervalSeries: name: IntervalSeries description: Stores intervals of data. The timestamps field stores the beginning @@ -131,13 +241,79 @@ classes: data: name: data description: Use values >0 if interval started, <0 if interval ended. + range: IntervalSeries__data + required: true + inlined: true + tree_root: true + IntervalSeries__data: + name: IntervalSeries__data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: int8 - required: true - multivalued: false - tree_root: true DecompositionSeries: name: DecompositionSeries description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -153,14 +329,12 @@ classes: description: Data decomposed into frequency bands. range: DecompositionSeries__data required: true - multivalued: false inlined: true metric: name: metric description: The metric used, e.g. phase, amplitude, power. 
range: text required: true - multivalued: false source_channels: name: source_channels annotations: @@ -173,8 +347,6 @@ classes: description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion - required: false - multivalued: false inlined: true bands: name: bands @@ -182,7 +354,6 @@ classes: from. There should be one row in this table for each band. range: DecompositionSeries__bands required: true - multivalued: false inlined: true inlined_as_list: true source_timeseries: @@ -191,8 +362,6 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: TimeSeries @@ -209,6 +378,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -289,63 +503,13 @@ classes: identifier: true range: string required: true - spike_times_index: - name: spike_times_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the spike_times dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - spike_times: - name: spike_times - description: Spike times for each unit in seconds. 
- range: Units__spike_times - required: false - multivalued: false - inlined: true - obs_intervals_index: - name: obs_intervals_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the obs_intervals dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - obs_intervals: - name: obs_intervals - description: Observation intervals for each unit. + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. array: - dimensions: - - alias: num_intervals - - alias: start_end - exact_cardinality: 2 - range: float64 - required: false - multivalued: false - electrodes_index: - name: electrodes_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into electrodes. - range: VectorIndex + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: ElectrodeGroup required: false multivalued: false inlined: true @@ -360,51 +524,69 @@ classes: value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion - required: false - multivalued: false inlined: true - electrode_group: - name: electrode_group - description: Electrode group that each spike unit came from. + electrodes_index: + name: electrodes_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into electrodes. + range: VectorIndex + inlined: true + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. 
array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: ElectrodeGroup + dimensions: + - alias: num_intervals + - alias: start_end + exact_cardinality: 2 + range: float64 required: false multivalued: false + obs_intervals_index: + name: obs_intervals_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the obs_intervals dataset. + range: VectorIndex + inlined: true + spike_times: + name: spike_times + description: Spike times for each unit in seconds. + range: Units__spike_times + inlined: true + spike_times_index: + name: spike_times_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the spike_times dataset. + range: VectorIndex inlined: true waveform_mean: name: waveform_mean description: Spike waveform mean for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_mean + inlined: true waveform_sd: name: waveform_sd description: Spike waveform standard deviation for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_sd + inlined: true waveforms: name: waveforms description: Individual waveforms for each spike on each electrode. This is @@ -430,13 +612,8 @@ classes: order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. 
- array: - dimensions: - - alias: num_waveforms - - alias: num_samples - range: numeric - required: false - multivalued: false + range: Units__waveforms + inlined: true waveforms_index: name: waveforms_index annotations: @@ -449,8 +626,6 @@ classes: description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true waveforms_index_index: name: waveforms_index_index @@ -464,8 +639,6 @@ classes: description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true Units__spike_times: @@ -489,3 +662,97 @@ classes: for the spike time to be between samples. range: float64 required: false + Units__waveform_mean: + name: Units__waveform_mean + description: Spike waveform mean for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_mean) + identifier: true + range: string + required: true + equals_string: waveform_mean + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveform_sd: + name: Units__waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_sd) + identifier: true + range: string + required: true + equals_string: waveform_sd + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveforms: + name: Units__waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. 
+ is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveforms) + identifier: true + range: string + required: true + equals_string: waveforms + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml index b485822..aa9b528 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml @@ -27,12 +27,9 @@ classes: data: name: data description: Applied power for optogenetic stimulus, in watts. - array: - dimensions: - - alias: num_times - range: numeric + range: OptogeneticSeries__data required: true - multivalued: false + inlined: true site: name: site annotations: @@ -40,12 +37,80 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: OptogeneticStimulusSite - range: string tree_root: true + OptogeneticSeries__data: + name: OptogeneticSeries__data + description: Applied power for optogenetic stimulus, in watts. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. 
Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. 
+ ifabsent: string(watts) + range: text + required: true + equals_string: watts + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric OptogeneticStimulusSite: name: OptogeneticStimulusSite description: A site of optogenetic stimulation. @@ -61,13 +126,11 @@ classes: description: Description of stimulation site. range: text required: true - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false location: name: location description: Location of the stimulation site. Specify the area, layer, comments @@ -75,7 +138,6 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false device: name: device annotations: @@ -83,7 +145,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml index 3da9ec5..9226935 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml @@ -26,11 +26,31 @@ classes: identifier: true range: string required: true + binning: + name: binning + description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + range: uint8 + required: false + exposure_time: + name: exposure_time + description: Exposure time of the sample; often the inverse of the frequency. + range: float32 + required: false + intensity: + name: intensity + description: Intensity of the excitation in mW/mm^2, if known. + range: float32 + required: false pmt_gain: name: pmt_gain description: Photomultiplier gain. range: float32 required: false + power: + name: power + description: Power of the excitation in mW, if known. 
+ range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology @@ -38,26 +58,6 @@ classes: be stored w/ the actual data. range: float32 required: false - exposure_time: - name: exposure_time - description: Exposure time of the sample; often the inverse of the frequency. - range: float32 - required: false - binning: - name: binning - description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. - range: uint8 - required: false - power: - name: power - description: Power of the excitation in mW, if known. - range: float32 - required: false - intensity: - name: intensity - description: Intensity of the excitation in mW/mm^2, if known. - range: float32 - required: false imaging_plane: name: imaging_plane annotations: @@ -65,7 +65,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -115,7 +114,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -135,17 +133,9 @@ classes: data: name: data description: Signals from ROIs. - range: numeric + range: RoiResponseSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_rois + inlined: true rois: name: rois annotations: @@ -159,9 +149,82 @@ classes: on the ROIs stored in this timeseries. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true + RoiResponseSeries__data: + name: RoiResponseSeries__data + description: Signals from ROIs. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois DfOverF: name: DfOverF description: dF/F information about a region of interest (ROI). Storage hierarchy @@ -237,6 +300,13 @@ classes: - alias: num_x - alias: num_y - alias: num_z + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + range: PlaneSegmentation__pixel_mask + inlined: true pixel_mask_index: name: pixel_mask_index annotations: @@ -248,17 +318,13 @@ classes: value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex - required: false - multivalued: false inlined: true - pixel_mask: - name: pixel_mask - description: 'Pixel masks for each ROI: a list of indices and weights for - the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' - range: PlaneSegmentation__pixel_mask - required: false - multivalued: false + range: PlaneSegmentation__voxel_mask inlined: true voxel_mask_index: name: voxel_mask_index @@ -271,17 +337,6 @@ classes: value: neurodata_type_inc description: Index into voxel_mask. 
range: VectorIndex - required: false - multivalued: false - inlined: true - voxel_mask: - name: voxel_mask - description: 'Voxel masks for each ROI: a list of indices and weights for - the ROI. Voxel masks are concatenated and parsing of this dataset is maintained - by the PlaneSegmentation' - range: PlaneSegmentation__voxel_mask - required: false - multivalued: false inlined: true reference_images: name: reference_images @@ -298,7 +353,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -324,24 +378,18 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Pixel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the pixel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false PlaneSegmentation__voxel_mask: name: PlaneSegmentation__voxel_mask description: 'Voxel masks for each ROI: a list of indices and weights for the @@ -362,32 +410,24 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Voxel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false z: name: z description: Voxel z-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the voxel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ImagingPlane: name: ImagingPlane description: An imaging plane and its metadata. @@ -402,27 +442,21 @@ classes: name: description description: Description of the imaging plane. range: text - required: false - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. 
range: float32 required: true - multivalued: false imaging_rate: name: imaging_rate description: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. range: float32 - required: false - multivalued: false indicator: name: indicator description: Calcium indicator. range: text required: true - multivalued: false location: name: location description: Location of the imaging plane. Specify the area, layer, comments @@ -430,15 +464,12 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false manifold: name: manifold description: DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. range: ImagingPlane__manifold - required: false - multivalued: false inlined: true origin_coords: name: origin_coords @@ -446,8 +477,6 @@ classes: 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). range: ImagingPlane__origin_coords - required: false - multivalued: false inlined: true grid_spacing: name: grid_spacing @@ -455,8 +484,6 @@ classes: in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. range: ImagingPlane__grid_spacing - required: false - multivalued: false inlined: true reference_frame: name: reference_frame @@ -478,8 +505,6 @@ classes: axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." range: text - required: false - multivalued: false optical_channel: name: optical_channel description: An optical channel used to record from an imaging plane. 
@@ -495,7 +520,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -628,13 +652,11 @@ classes: description: Description or other notes about the channel. range: text required: true - multivalued: false emission_lambda: name: emission_lambda description: Emission wavelength for channel, in nm. range: float32 required: true - multivalued: false tree_root: true MotionCorrection: name: MotionCorrection @@ -665,7 +687,6 @@ classes: description: Image stack with frames shifted to the common coordinates. range: ImageSeries required: true - multivalued: false inlined: true inlined_as_list: false xy_translation: @@ -674,7 +695,6 @@ classes: coordinates, for example, to align each frame to a reference image. range: TimeSeries required: true - multivalued: false inlined: true inlined_as_list: false original: @@ -684,7 +704,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml index c1fce82..33116cf 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml @@ -37,30 +37,24 @@ classes: description: Phase response to stimulus on the first measured axis. range: ImagingRetinotopy__axis_1_phase_map required: true - multivalued: false inlined: true axis_1_power_map: name: axis_1_power_map description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_1_power_map - required: false - multivalued: false inlined: true axis_2_phase_map: name: axis_2_phase_map description: Phase response to stimulus on the second measured axis. 
range: ImagingRetinotopy__axis_2_phase_map required: true - multivalued: false inlined: true axis_2_power_map: name: axis_2_power_map description: Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_2_power_map - required: false - multivalued: false inlined: true axis_descriptions: name: axis_descriptions @@ -79,16 +73,12 @@ classes: description: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].' range: ImagingRetinotopy__focal_depth_image - required: false - multivalued: false inlined: true sign_map: name: sign_map description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. range: ImagingRetinotopy__sign_map - required: false - multivalued: false inlined: true vasculature_image: name: vasculature_image @@ -96,7 +86,6 @@ classes: [rows][columns]' range: ImagingRetinotopy__vasculature_image required: true - multivalued: false inlined: true tree_root: true ImagingRetinotopy__axis_1_phase_map: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml index 7c3450a..ab0ec35 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -47,7 +47,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false count: name: count description: Number of data samples available in this time series, during @@ -56,7 +55,6 @@ classes: exact_number_dimensions: 1 range: int32 required: true - multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to @@ -64,7 +62,6 @@ classes: exact_number_dimensions: 1 range: TimeSeries required: true - multivalued: false inlined: true tree_root: true 
Image: @@ -189,7 +186,6 @@ classes: external file. range: TimeSeries__data required: true - multivalued: false inlined: true starting_time: name: starting_time @@ -197,8 +193,6 @@ classes: uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. range: TimeSeries__starting_time - required: false - multivalued: false inlined: true timestamps: name: timestamps @@ -241,8 +235,6 @@ classes: external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. range: TimeSeries__sync - required: false - multivalued: false inlined: true inlined_as_list: true tree_root: true @@ -429,7 +421,5 @@ classes: and only once, so the dataset should have the same length as the number of images. range: ImageReferences - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml index 32ff4f8..5f2dc7d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml @@ -38,14 +38,11 @@ classes: reference frame. range: SpatialSeries__data required: true - multivalued: false inlined: true reference_frame: name: reference_frame description: Description defining what exactly 'straight-ahead' means. range: text - required: false - multivalued: false tree_root: true SpatialSeries__data: name: SpatialSeries__data @@ -59,6 +56,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml index 71eadb4..db3213f 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -39,40 +39,6 @@ classes: about the filter properties as possible. range: text required: false - data: - name: data - description: Recorded voltage data. - range: numeric - required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_channels - - array: - dimensions: - - alias: num_times - - alias: num_channels - - alias: num_samples - electrodes: - name: electrodes - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: DynamicTableRegion pointer to the electrodes that this time series - was generated from. - range: DynamicTableRegion - required: true - multivalued: false - inlined: true channel_conversion: name: channel_conversion description: Channel-specific conversion factor. Multiply the data in the @@ -90,7 +56,109 @@ classes: range: float32 required: false multivalued: false + data: + name: data + description: Recorded voltage data. + range: ElectricalSeries__data + required: true + inlined: true + electrodes: + name: electrodes + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. 
+ range: DynamicTableRegion + required: true + inlined: true tree_root: true + ElectricalSeries__data: + name: ElectricalSeries__data + description: Recorded voltage data. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. 
Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. This value + is fixed to 'volts'. Actual stored values are not necessarily stored in + these units. To access the data in these units, multiply 'data' by 'conversion', + followed by 'channel_conversion' (if present), and then add 'offset'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples SpikeEventSeries: name: SpikeEventSeries description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold @@ -111,19 +179,9 @@ classes: data: name: data description: Spike waveforms. 
- range: numeric + range: SpikeEventSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_events - - alias: num_samples - - array: - dimensions: - - alias: num_events - - alias: num_channels - - alias: num_samples + inlined: true timestamps: name: timestamps description: Timestamps for samples stored in data, in seconds, relative to @@ -137,6 +195,82 @@ classes: required: true multivalued: false tree_root: true + SpikeEventSeries__data: + name: SpikeEventSeries__data + description: Spike waveforms. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for waveforms, which is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: true + equals_string: volts + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples FeatureExtraction: name: FeatureExtraction description: Features, such as PC1 and PC2, that are extracted from signals stored @@ -192,7 +326,6 @@ classes: was generated from. range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true EventDetection: @@ -212,7 +345,6 @@ classes: or dV/dT threshold, as well as relevant values. 
range: text required: true - multivalued: false source_idx: name: source_idx description: Indices (zero-based) into source ElectricalSeries::data array @@ -241,7 +373,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ElectricalSeries @@ -323,8 +454,6 @@ classes: name: position description: stereotaxic or common framework coordinates range: ElectrodeGroup__position - required: false - multivalued: false inlined: true device: name: device @@ -333,7 +462,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -356,24 +484,18 @@ classes: array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false y: name: y description: y coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false z: name: z description: z coordinate array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ClusterWaveforms: name: ClusterWaveforms description: DEPRECATED The mean waveform shape, including standard deviation, @@ -395,7 +517,6 @@ classes: description: Filtering applied to data before generating mean/sd range: text required: true - multivalued: false waveform_mean: name: waveform_mean description: The mean waveform for each cluster, using the same indices for @@ -427,7 +548,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Clustering @@ -451,7 +571,6 @@ classes: clusters curated using Klusters, etc) range: text required: true - multivalued: false num: name: num description: Cluster number of each event diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml index 471b87a..e556749 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml @@ -63,8 +63,6 @@ classes: value: neurodata_type_inc description: Index for tags. range: VectorIndex - required: false - multivalued: false inlined: true timeseries: name: timeseries @@ -77,8 +75,6 @@ classes: value: neurodata_type_inc description: An index into a TimeSeries object. range: TimeSeriesReferenceVectorData - required: false - multivalued: false inlined: true timeseries_index: name: timeseries_index @@ -91,7 +87,5 @@ classes: value: neurodata_type_inc description: Index for timeseries. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml index a6b27f5..fcd10dc 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -81,13 +81,11 @@ classes: other files. range: text required: true - multivalued: false session_description: name: session_description description: A description of the experimental session and data in the file. range: text required: true - multivalued: false session_start_time: name: session_start_time description: 'Date and time of the experiment/session start. The date is stored @@ -96,7 +94,6 @@ classes: offset. Date accuracy is up to milliseconds.' range: isodatetime required: true - multivalued: false timestamps_reference_time: name: timestamps_reference_time description: 'Date and time corresponding to time zero of all timestamps. @@ -106,7 +103,6 @@ classes: times stored in the file use this time as reference (i.e., time zero).' 
range: isodatetime required: true - multivalued: false acquisition: name: acquisition description: Data streams recorded from the system, including ephys, ophys, @@ -185,7 +181,6 @@ classes: can exist in the present file or can be linked to a remote library file. range: NWBFile__stimulus required: true - multivalued: false inlined: true inlined_as_list: true general: @@ -207,7 +202,6 @@ classes: should not be created unless there is data to store within them. range: NWBFile__general required: true - multivalued: false inlined: true inlined_as_list: true intervals: @@ -217,16 +211,12 @@ classes: an experiment, or epochs (see epochs subgroup) deriving from analysis of data. range: NWBFile__intervals - required: false - multivalued: false inlined: true inlined_as_list: true units: name: units description: Data about sorted spike units. range: Units - required: false - multivalued: false inlined: true inlined_as_list: false tree_root: true @@ -302,14 +292,10 @@ classes: name: data_collection description: Notes about data collection and analysis. range: text - required: false - multivalued: false experiment_description: name: experiment_description description: General description of the experiment. range: text - required: false - multivalued: false experimenter: name: experimenter description: Name of person(s) who performed the experiment. Can also specify @@ -324,8 +310,6 @@ classes: name: institution description: Institution(s) where experiment was performed. range: text - required: false - multivalued: false keywords: name: keywords description: Terms to search over. @@ -339,28 +323,20 @@ classes: name: lab description: Laboratory where experiment was performed. range: text - required: false - multivalued: false notes: name: notes description: Notes about the experiment. range: text - required: false - multivalued: false pharmacology: name: pharmacology description: Description of drugs used, including how and when they were administered. 
Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. range: text - required: false - multivalued: false protocol: name: protocol description: Experimental protocol, if applicable. e.g., include IACUC protocol number. range: text - required: false - multivalued: false related_publications: name: related_publications description: Publication information. PMID, DOI, URL, etc. @@ -374,49 +350,36 @@ classes: name: session_id description: Lab-specific ID for the session. range: text - required: false - multivalued: false slices: name: slices description: Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. range: text - required: false - multivalued: false source_script: name: source_script description: Script file or link to public source code used to create this NWB file. range: general__source_script - required: false - multivalued: false inlined: true stimulus: name: stimulus description: Notes about stimuli, such as how and where they were presented. range: text - required: false - multivalued: false surgery: name: surgery description: Narrative description about surgery/surgeries, including date(s) and who performed surgery. range: text - required: false - multivalued: false virus: name: virus description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - required: false - multivalued: false lab_meta_data: name: lab_meta_data description: Place-holder than can be extended so that lab-specific meta-data can be placed in /general. range: LabMetaData - required: false multivalued: true inlined: true inlined_as_list: false @@ -434,24 +397,18 @@ classes: description: Information about the animal or person from which the data was measured. 
range: Subject - required: false - multivalued: false inlined: true inlined_as_list: false extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. range: general__extracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. range: general__intracellular_ephys - required: false - multivalued: false inlined: true inlined_as_list: true optogenetics: @@ -506,7 +463,6 @@ classes: name: electrode_group description: Physical group of electrodes. range: ElectrodeGroup - required: false multivalued: true inlined: true inlined_as_list: false @@ -514,8 +470,6 @@ classes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes - required: false - multivalued: false inlined: true inlined_as_list: true extracellular_ephys__electrodes: @@ -662,13 +616,10 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - required: false - multivalued: false intracellular_electrode: name: intracellular_electrode description: An intracellular electrode. range: IntracellularElectrode - required: false multivalued: true inlined: true inlined_as_list: false @@ -679,8 +630,6 @@ classes: tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.' range: SweepTable - required: false - multivalued: false inlined: true inlined_as_list: false intracellular_recordings: @@ -698,8 +647,6 @@ classes: to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. 
range: IntracellularRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false simultaneous_recordings: @@ -708,8 +655,6 @@ classes: the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes range: SimultaneousRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false sequential_recordings: @@ -719,8 +664,6 @@ classes: together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence. range: SequentialRecordingsTable - required: false - multivalued: false inlined: true inlined_as_list: false repetitions: @@ -730,8 +673,6 @@ classes: type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. range: RepetitionsTable - required: false - multivalued: false inlined: true inlined_as_list: false experimental_conditions: @@ -739,8 +680,6 @@ classes: description: A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. range: ExperimentalConditionsTable - required: false - multivalued: false inlined: true inlined_as_list: false NWBFile__intervals: @@ -761,24 +700,18 @@ classes: description: Divisions in time marking experimental stages or sub-divisions of a single recording session. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false trials: name: trials description: Repeated experimental events that have a logical grouping. range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false invalid_times: name: invalid_times description: Time intervals that should be removed from analysis. 
range: TimeIntervals - required: false - multivalued: false inlined: true inlined_as_list: false time_intervals: @@ -786,7 +719,6 @@ classes: description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals - required: false multivalued: true inlined: true inlined_as_list: false @@ -815,59 +747,41 @@ classes: name: age description: Age of subject. Can be supplied instead of 'date_of_birth'. range: Subject__age - required: false - multivalued: false inlined: true date_of_birth: name: date_of_birth description: Date of birth of subject. Can be supplied instead of 'age'. range: isodatetime - required: false - multivalued: false description: name: description description: Description of subject and where subject came from (e.g., breeder, if animal). range: text - required: false - multivalued: false genotype: name: genotype description: Genetic strain. If absent, assume Wild Type (WT). range: text - required: false - multivalued: false sex: name: sex description: Gender of subject. range: text - required: false - multivalued: false species: name: species description: Species of subject. range: text - required: false - multivalued: false strain: name: strain description: Strain of subject. range: text - required: false - multivalued: false subject_id: name: subject_id description: ID of animal/person used/participating in experiment (lab convention). range: text - required: false - multivalued: false weight: name: weight description: Weight at time of experiment, at time of surgery and at other important times. 
range: text - required: false - multivalued: false tree_root: true Subject__age: name: Subject__age diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml index a8662e7..30fcb0c 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml @@ -41,15 +41,12 @@ classes: description: Recorded voltage or current. range: PatchClampSeries__data required: true - multivalued: false inlined: true gain: name: gain description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). range: float32 - required: false - multivalued: false electrode: name: electrode annotations: @@ -57,7 +54,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: IntracellularElectrode @@ -74,6 +70,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. 
If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -99,31 +140,24 @@ classes: identifier: true range: string required: true + bias_current: + name: bias_current + description: Bias current, in amps. + range: float32 + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + range: float32 + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + range: float32 data: name: data description: Recorded voltage. 
range: CurrentClampSeries__data required: true - multivalued: false inlined: true - bias_current: - name: bias_current - description: Bias current, in amps. - range: float32 - required: false - multivalued: false - bridge_balance: - name: bridge_balance - description: Bridge balance, in ohms. - range: float32 - required: false - multivalued: false - capacitance_compensation: - name: capacitance_compensation - description: Capacitance compensation, in farads. - range: float32 - required: false - multivalued: false tree_root: true CurrentClampSeries__data: name: CurrentClampSeries__data @@ -136,6 +170,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -148,8 +227,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IZeroClampSeries: name: IZeroClampSeries description: Voltage data from an intracellular recording when all current and @@ -176,19 +257,16 @@ classes: description: Bias current, in amps, fixed to 0.0. range: float32 required: true - multivalued: false bridge_balance: name: bridge_balance description: Bridge balance, in ohms, fixed to 0.0. range: float32 required: true - multivalued: false capacitance_compensation: name: capacitance_compensation description: Capacitance compensation, in farads, fixed to 0.0. 
range: float32 required: true - multivalued: false tree_root: true CurrentClampStimulusSeries: name: CurrentClampStimulusSeries @@ -205,7 +283,6 @@ classes: description: Stimulus current applied. range: CurrentClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true CurrentClampStimulusSeries__data: @@ -219,6 +296,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -231,8 +353,10 @@ classes: equals_string: amperes value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries: name: VoltageClampSeries description: Current data from an intracellular voltage-clamp recording. A corresponding @@ -245,88 +369,48 @@ classes: identifier: true range: string required: true - data: - name: data - description: Recorded current. - range: VoltageClampSeries__data - required: true - multivalued: false - inlined: true capacitance_fast: name: capacitance_fast description: Fast capacitance, in farads. range: VoltageClampSeries__capacitance_fast - required: false - multivalued: false inlined: true capacitance_slow: name: capacitance_slow description: Slow capacitance, in farads. range: VoltageClampSeries__capacitance_slow - required: false - multivalued: false + inlined: true + data: + name: data + description: Recorded current. 
+ range: VoltageClampSeries__data + required: true inlined: true resistance_comp_bandwidth: name: resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. range: VoltageClampSeries__resistance_comp_bandwidth - required: false - multivalued: false inlined: true resistance_comp_correction: name: resistance_comp_correction description: Resistance compensation correction, in percent. range: VoltageClampSeries__resistance_comp_correction - required: false - multivalued: false inlined: true resistance_comp_prediction: name: resistance_comp_prediction description: Resistance compensation prediction, in percent. range: VoltageClampSeries__resistance_comp_prediction - required: false - multivalued: false inlined: true whole_cell_capacitance_comp: name: whole_cell_capacitance_comp description: Whole cell capacitance compensation, in farads. range: VoltageClampSeries__whole_cell_capacitance_comp - required: false - multivalued: false inlined: true whole_cell_series_resistance_comp: name: whole_cell_series_resistance_comp description: Whole cell series resistance compensation, in ohms. range: VoltageClampSeries__whole_cell_series_resistance_comp - required: false - multivalued: false inlined: true tree_root: true - VoltageClampSeries__data: - name: VoltageClampSeries__data - description: Recorded current. - attributes: - name: - name: name - ifabsent: string(data) - identifier: true - range: string - required: true - equals_string: data - unit: - name: unit - description: Base unit of measurement for working with the data. which is - fixed to 'amperes'. Actual stored values are not necessarily stored in these - units. To access the data in these units, multiply 'data' by 'conversion' - and add 'offset'. 
- ifabsent: string(amperes) - range: text - required: true - equals_string: amperes - value: - name: value - range: AnyType - required: true VoltageClampSeries__capacitance_fast: name: VoltageClampSeries__capacitance_fast description: Fast capacitance, in farads. @@ -371,6 +455,78 @@ classes: name: value range: float32 required: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. 
+ ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + ifabsent: string(amperes) + range: text + required: true + equals_string: amperes + value: + name: value + array: + dimensions: + - alias: num_times + range: numeric VoltageClampSeries__resistance_comp_bandwidth: name: VoltageClampSeries__resistance_comp_bandwidth description: Resistance compensation bandwidth, in hertz. @@ -501,7 +657,6 @@ classes: description: Stimulus voltage applied. range: VoltageClampStimulusSeries__data required: true - multivalued: false inlined: true tree_root: true VoltageClampStimulusSeries__data: @@ -515,6 +670,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. which is @@ -527,8 +727,10 @@ classes: equals_string: volts value: name: value - range: AnyType - required: true + array: + dimensions: + - alias: num_times + range: numeric IntracellularElectrode: name: IntracellularElectrode description: An intracellular electrode and its metadata. @@ -543,52 +745,37 @@ classes: name: cell_id description: unique ID of the cell range: text - required: false - multivalued: false description: name: description description: Description of electrode (e.g., whole-cell, sharp, etc.). range: text required: true - multivalued: false filtering: name: filtering description: Electrode specific filtering. range: text - required: false - multivalued: false initial_access_resistance: name: initial_access_resistance description: Initial access resistance. range: text - required: false - multivalued: false location: name: location description: Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. range: text - required: false - multivalued: false resistance: name: resistance description: Electrode resistance, in ohms. range: text - required: false - multivalued: false seal: name: seal description: Information about seal used for recording. range: text - required: false - multivalued: false slice: name: slice description: Information about slice used for recording. range: text - required: false - multivalued: false device: name: device annotations: @@ -596,7 +783,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -615,15 +801,6 @@ classes: identifier: true range: string required: true - sweep_number: - name: sweep_number - description: Sweep number of the PatchClampSeries in that row. 
- array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: uint32 - required: true - multivalued: false series: name: series description: The PatchClampSeries with the sweep number in that row. @@ -646,8 +823,16 @@ classes: description: Index for series. range: VectorIndex required: true - multivalued: false inlined: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. + array: + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: uint32 + required: true + multivalued: false tree_root: true IntracellularElectrodesTable: name: IntracellularElectrodesTable @@ -707,7 +892,6 @@ classes: recording (rows). range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true stimulus_template: name: stimulus_template @@ -721,8 +905,6 @@ classes: description: Column storing the reference to the stimulus template for the recording (rows). range: TimeSeriesReferenceVectorData - required: false - multivalued: false inlined: true tree_root: true IntracellularResponsesTable: @@ -755,7 +937,6 @@ classes: recording (rows) range: TimeSeriesReferenceVectorData required: true - multivalued: false inlined: true tree_root: true IntracellularRecordingsTable: @@ -797,15 +978,6 @@ classes: description: Table for storing intracellular electrode related metadata. range: IntracellularElectrodesTable required: true - multivalued: false - inlined: true - inlined_as_list: false - stimuli: - name: stimuli - description: Table for storing intracellular stimulus related metadata. - range: IntracellularStimuliTable - required: true - multivalued: false inlined: true inlined_as_list: false responses: @@ -813,7 +985,13 @@ classes: description: Table for storing intracellular response related metadata. 
range: IntracellularResponsesTable required: true - multivalued: false + inlined: true + inlined_as_list: false + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + range: IntracellularStimuliTable + required: true inlined: true inlined_as_list: false tree_root: true @@ -837,7 +1015,6 @@ classes: table. range: SimultaneousRecordingsTable__recordings required: true - multivalued: false inlined: true recordings_index: name: recordings_index @@ -851,7 +1028,6 @@ classes: description: Index dataset for the recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true SimultaneousRecordingsTable__recordings: @@ -896,7 +1072,6 @@ classes: table. range: SequentialRecordingsTable__simultaneous_recordings required: true - multivalued: false inlined: true simultaneous_recordings_index: name: simultaneous_recordings_index @@ -910,7 +1085,6 @@ classes: description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true - multivalued: false inlined: true stimulus_type: name: stimulus_type @@ -964,7 +1138,6 @@ classes: table. range: RepetitionsTable__sequential_recordings required: true - multivalued: false inlined: true sequential_recordings_index: name: sequential_recordings_index @@ -978,7 +1151,6 @@ classes: description: Index dataset for the sequential_recordings column. range: VectorIndex required: true - multivalued: false inlined: true tree_root: true RepetitionsTable__sequential_recordings: @@ -1020,7 +1192,6 @@ classes: description: A reference to one or more rows in the RepetitionsTable table. range: ExperimentalConditionsTable__repetitions required: true - multivalued: false inlined: true repetitions_index: name: repetitions_index @@ -1034,7 +1205,6 @@ classes: description: Index dataset for the repetitions column. 
range: VectorIndex required: true - multivalued: false inlined: true tree_root: true ExperimentalConditionsTable__repetitions: diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml index 603c351..6b17e13 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.image.yaml @@ -91,21 +91,9 @@ classes: name: data description: Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. - range: numeric + range: ImageSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - array: - dimensions: - - alias: frame - - alias: x - - alias: y - - alias: z + inlined: true dimension: name: dimension description: Number of pixels on x, y, (and z) axes. @@ -123,8 +111,6 @@ classes: used if the image is stored in another NWB file and that file is linked to this file. range: ImageSeries__external_file - required: false - multivalued: false inlined: true format: name: format @@ -132,22 +118,98 @@ classes: contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + ifabsent: string(raw) range: text - required: false - multivalued: false device: name: device annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Device - range: string tree_root: true + ImageSeries__data: + name: ImageSeries__data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. 
+ attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. 
Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z ImageSeries__external_file: name: ImageSeries__external_file description: Paths to one or more external file(s). The field is only present @@ -206,7 +268,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries @@ -226,12 +287,16 @@ classes: identifier: true range: string required: true + data: + name: data + description: Images presented to subject, either grayscale or RGB + range: OpticalSeries__data + required: true + inlined: true distance: name: distance description: Distance from camera/monitor to target/eye. range: float32 - required: false - multivalued: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -247,12 +312,78 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 - data: - name: data - description: Images presented to subject, either grayscale or RGB - range: numeric + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. 
+ range: text + tree_root: true + OpticalSeries__data: + name: OpticalSeries__data + description: Images presented to subject, either grayscale or RGB + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string required: true - multivalued: false + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. 
Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + value: + name: value + range: numeric any_of: - array: dimensions: @@ -266,14 +397,6 @@ classes: - alias: y - alias: r_g_b exact_cardinality: 3 - orientation: - name: orientation - description: Description of image relative to some reference frame (e.g., - which way is up). Must also specify frame of reference. - range: text - required: false - multivalued: false - tree_root: true IndexSeries: name: IndexSeries description: Stores indices to image frames stored in an ImageSeries. The purpose @@ -294,20 +417,15 @@ classes: name: data description: Index of the image (using zero-indexing) in the linked Images object. 
- array: - dimensions: - - alias: num_times - range: uint32 + range: IndexSeries__data required: true - multivalued: false + inlined: true indexed_timeseries: name: indexed_timeseries annotations: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: ImageSeries @@ -318,10 +436,62 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: Images - range: string tree_root: true + IndexSeries__data: + name: IndexSeries__data + description: Index of the image (using zero-indexing) in the linked Images object. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: This field is unused by IndexSeries. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: This field is unused by IndexSeries. + range: float32 + required: false + resolution: + name: resolution + description: This field is unused by IndexSeries. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: This field is unused by IndexSeries and has the value N/A. 
+ ifabsent: string(N/A) + range: text + required: true + equals_string: N/A + value: + name: value + array: + dimensions: + - alias: num_times + range: uint32 diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml index b30070d..917e860 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.misc.yaml @@ -38,7 +38,6 @@ classes: description: Values of each feature at each time. range: AbstractFeatureSeries__data required: true - multivalued: false inlined: true feature_units: name: feature_units @@ -70,6 +69,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Since there can be different units for different features, store @@ -105,13 +149,79 @@ classes: data: name: data description: Annotations made during an experiment. + range: AnnotationSeries__data + required: true + inlined: true + tree_root: true + AnnotationSeries__data: + name: AnnotationSeries__data + description: Annotations made during an experiment. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. 
Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: text - required: true - multivalued: false - tree_root: true IntervalSeries: name: IntervalSeries description: Stores intervals of data. The timestamps field stores the beginning @@ -131,13 +241,79 @@ classes: data: name: data description: Use values >0 if interval started, <0 if interval ended. + range: IntervalSeries__data + required: true + inlined: true + tree_root: true + IntervalSeries__data: + name: IntervalSeries__data + description: Use values >0 if interval started, <0 if interval ended. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data. Annotations + have no units, so the value is fixed to -1.0. + ifabsent: float(-1.0) + range: float32 + required: true + equals_number: -1 + unit: + name: unit + description: Base unit of measurement for working with the data. Annotations + have no units, so the value is fixed to 'n/a'. + ifabsent: string(n/a) + range: text + required: true + equals_string: n/a + value: + name: value array: dimensions: - alias: num_times range: int8 - required: true - multivalued: false - tree_root: true DecompositionSeries: name: DecompositionSeries description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. @@ -153,14 +329,12 @@ classes: description: Data decomposed into frequency bands. range: DecompositionSeries__data required: true - multivalued: false inlined: true metric: name: metric description: The metric used, e.g. phase, amplitude, power. 
range: text required: true - multivalued: false source_channels: name: source_channels annotations: @@ -173,8 +347,6 @@ classes: description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion - required: false - multivalued: false inlined: true bands: name: bands @@ -182,7 +354,6 @@ classes: from. There should be one row in this table for each band. range: DecompositionSeries__bands required: true - multivalued: false inlined: true inlined_as_list: true source_timeseries: @@ -191,8 +362,6 @@ classes: source_type: tag: source_type value: link - required: false - multivalued: false inlined: true any_of: - range: TimeSeries @@ -209,6 +378,51 @@ classes: range: string required: true equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored @@ -289,63 +503,13 @@ classes: identifier: true range: string required: true - spike_times_index: - name: spike_times_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the spike_times dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - spike_times: - name: spike_times - description: Spike times for each unit in seconds. 
- range: Units__spike_times - required: false - multivalued: false - inlined: true - obs_intervals_index: - name: obs_intervals_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into the obs_intervals dataset. - range: VectorIndex - required: false - multivalued: false - inlined: true - obs_intervals: - name: obs_intervals - description: Observation intervals for each unit. + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. array: - dimensions: - - alias: num_intervals - - alias: start_end - exact_cardinality: 2 - range: float64 - required: false - multivalued: false - electrodes_index: - name: electrodes_index - annotations: - named: - tag: named - value: true - source_type: - tag: source_type - value: neurodata_type_inc - description: Index into electrodes. - range: VectorIndex + minimum_number_dimensions: 1 + maximum_number_dimensions: false + range: ElectrodeGroup required: false multivalued: false inlined: true @@ -360,51 +524,69 @@ classes: value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion - required: false - multivalued: false inlined: true - electrode_group: - name: electrode_group - description: Electrode group that each spike unit came from. + electrodes_index: + name: electrodes_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into electrodes. + range: VectorIndex + inlined: true + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. 
array: - minimum_number_dimensions: 1 - maximum_number_dimensions: false - range: ElectrodeGroup + dimensions: + - alias: num_intervals + - alias: start_end + exact_cardinality: 2 + range: float64 required: false multivalued: false + obs_intervals_index: + name: obs_intervals_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the obs_intervals dataset. + range: VectorIndex + inlined: true + spike_times: + name: spike_times + description: Spike times for each unit in seconds. + range: Units__spike_times + inlined: true + spike_times_index: + name: spike_times_index + annotations: + named: + tag: named + value: true + source_type: + tag: source_type + value: neurodata_type_inc + description: Index into the spike_times dataset. + range: VectorIndex inlined: true waveform_mean: name: waveform_mean description: Spike waveform mean for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_mean + inlined: true waveform_sd: name: waveform_sd description: Spike waveform standard deviation for each spike unit. - range: float32 - required: false - multivalued: false - any_of: - - array: - dimensions: - - alias: num_units - - alias: num_samples - - array: - dimensions: - - alias: num_units - - alias: num_samples - - alias: num_electrodes + range: Units__waveform_sd + inlined: true waveforms: name: waveforms description: Individual waveforms for each spike on each electrode. This is @@ -430,13 +612,8 @@ classes: order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. 
- array: - dimensions: - - alias: num_waveforms - - alias: num_samples - range: numeric - required: false - multivalued: false + range: Units__waveforms + inlined: true waveforms_index: name: waveforms_index annotations: @@ -449,8 +626,6 @@ classes: description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true waveforms_index_index: name: waveforms_index_index @@ -464,8 +639,6 @@ classes: description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex - required: false - multivalued: false inlined: true tree_root: true Units__spike_times: @@ -489,3 +662,97 @@ classes: for the spike time to be between samples. range: float64 required: false + Units__waveform_mean: + name: Units__waveform_mean + description: Spike waveform mean for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_mean) + identifier: true + range: string + required: true + equals_string: waveform_mean + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveform_sd: + name: Units__waveform_sd + description: Spike waveform standard deviation for each spike unit. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveform_sd) + identifier: true + range: string + required: true + equals_string: waveform_sd + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. 
+ ifabsent: string(volts) + range: text + required: false + equals_string: volts + Units__waveforms: + name: Units__waveforms + description: Individual waveforms for each spike on each electrode. This is a + doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column + indexes the 'waveforms_index' column to indicate which spike events belong to + a given unit. For example, if the 'waveforms_index_index' column has values + [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond + to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' + column correspond to the 3 spike events of the second unit, and the next 1 element + of the 'waveforms_index' column corresponds to the 1 spike event of the third + unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms + that were recorded from 3 different electrodes for the first spike time of the + first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should + be the same within a given unit. The 'electrodes' column should be used to indicate + which electrodes are associated with each unit, and the order of the waveforms + within a given unit x spike event should be in the same order as the electrodes + referenced in the 'electrodes' column of this table. The number of samples for + each waveform must be the same. 
+ is_a: VectorData + attributes: + name: + name: name + ifabsent: string(waveforms) + identifier: true + range: string + required: true + equals_string: waveforms + sampling_rate: + name: sampling_rate + description: Sampling rate, in hertz. + range: float32 + required: false + unit: + name: unit + description: Unit of measurement. This value is fixed to 'volts'. + ifabsent: string(volts) + range: text + required: false + equals_string: volts diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml index 9cc7b0d..2e71557 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml @@ -29,9 +29,89 @@ classes: description: Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents. - range: numeric + range: OptogeneticSeries__data required: true - multivalued: false + inlined: true + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + inlined: true + any_of: + - range: OptogeneticStimulusSite + - range: string + tree_root: true + OptogeneticSeries__data: + name: OptogeneticSeries__data + description: Applied power for optogenetic stimulus, in watts. Shape can be 1D + or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that + defines what the second dimension represents. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. 
An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. 
+ ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Unit of measurement for data, which is fixed to 'watts'. + ifabsent: string(watts) + range: text + required: true + equals_string: watts + value: + name: value + range: numeric any_of: - array: dimensions: @@ -40,19 +120,6 @@ classes: dimensions: - alias: num_times - alias: num_rois - site: - name: site - annotations: - source_type: - tag: source_type - value: link - required: true - multivalued: false - inlined: true - any_of: - - range: OptogeneticStimulusSite - - range: string - tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite description: A site of optogenetic stimulation. @@ -68,13 +135,11 @@ classes: description: Description of stimulation site. range: text required: true - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false location: name: location description: Location of the stimulation site. Specify the area, layer, comments @@ -82,7 +147,6 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false device: name: device annotations: @@ -90,7 +154,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml index b5d3676..61cb747 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -26,11 +26,31 @@ classes: identifier: true range: string required: true + binning: + name: binning + description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. 
+ range: uint8 + required: false + exposure_time: + name: exposure_time + description: Exposure time of the sample; often the inverse of the frequency. + range: float32 + required: false + intensity: + name: intensity + description: Intensity of the excitation in mW/mm^2, if known. + range: float32 + required: false pmt_gain: name: pmt_gain description: Photomultiplier gain. range: float32 required: false + power: + name: power + description: Power of the excitation in mW, if known. + range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology @@ -38,26 +58,6 @@ classes: be stored w/ the actual data. range: float32 required: false - exposure_time: - name: exposure_time - description: Exposure time of the sample; often the inverse of the frequency. - range: float32 - required: false - binning: - name: binning - description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. - range: uint8 - required: false - power: - name: power - description: Power of the excitation in mW, if known. - range: float32 - required: false - intensity: - name: intensity - description: Intensity of the excitation in mW/mm^2, if known. - range: float32 - required: false imaging_plane: name: imaging_plane annotations: @@ -65,7 +65,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -115,7 +114,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -135,17 +133,9 @@ classes: data: name: data description: Signals from ROIs. - range: numeric + range: RoiResponseSeries__data required: true - multivalued: false - any_of: - - array: - dimensions: - - alias: num_times - - array: - dimensions: - - alias: num_times - - alias: num_rois + inlined: true rois: name: rois annotations: @@ -159,9 +149,82 @@ classes: on the ROIs stored in this timeseries. 
range: DynamicTableRegion required: true - multivalued: false inlined: true tree_root: true + RoiResponseSeries__data: + name: RoiResponseSeries__data + description: Signals from ROIs. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) + range: float32 + required: false + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + required: false + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + ifabsent: float(-1.0) + range: float32 + required: false + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. 
To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + required: true + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + required: false + value: + name: value + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois DfOverF: name: DfOverF description: dF/F information about a region of interest (ROI). Storage hierarchy @@ -237,6 +300,13 @@ classes: - alias: num_x - alias: num_y - alias: num_z + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + range: PlaneSegmentation__pixel_mask + inlined: true pixel_mask_index: name: pixel_mask_index annotations: @@ -248,17 +318,13 @@ classes: value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex - required: false - multivalued: false inlined: true - pixel_mask: - name: pixel_mask - description: 'Pixel masks for each ROI: a list of indices and weights for - the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' - range: PlaneSegmentation__pixel_mask - required: false - multivalued: false + range: PlaneSegmentation__voxel_mask inlined: true voxel_mask_index: name: voxel_mask_index @@ -271,17 +337,6 @@ classes: value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex - required: false - multivalued: false - inlined: true - voxel_mask: - name: voxel_mask - description: 'Voxel masks for each ROI: a list of indices and weights for - the ROI. Voxel masks are concatenated and parsing of this dataset is maintained - by the PlaneSegmentation' - range: PlaneSegmentation__voxel_mask - required: false - multivalued: false inlined: true reference_images: name: reference_images @@ -298,7 +353,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImagingPlane @@ -324,24 +378,18 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Pixel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the pixel. array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false PlaneSegmentation__voxel_mask: name: PlaneSegmentation__voxel_mask description: 'Voxel masks for each ROI: a list of indices and weights for the @@ -362,32 +410,24 @@ classes: array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false y: name: y description: Voxel y-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false z: name: z description: Voxel z-coordinate. array: exact_number_dimensions: 1 range: uint32 - required: false - multivalued: false weight: name: weight description: Weight of the voxel. 
array: exact_number_dimensions: 1 range: float32 - required: false - multivalued: false ImagingPlane: name: ImagingPlane description: An imaging plane and its metadata. @@ -402,27 +442,21 @@ classes: name: description description: Description of the imaging plane. range: text - required: false - multivalued: false excitation_lambda: name: excitation_lambda description: Excitation wavelength, in nm. range: float32 required: true - multivalued: false imaging_rate: name: imaging_rate description: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. range: float32 - required: false - multivalued: false indicator: name: indicator description: Calcium indicator. range: text required: true - multivalued: false location: name: location description: Location of the imaging plane. Specify the area, layer, comments @@ -430,15 +464,12 @@ classes: standard atlas names for anatomical regions when possible. range: text required: true - multivalued: false manifold: name: manifold description: DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. range: ImagingPlane__manifold - required: false - multivalued: false inlined: true origin_coords: name: origin_coords @@ -446,8 +477,6 @@ classes: 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). range: ImagingPlane__origin_coords - required: false - multivalued: false inlined: true grid_spacing: name: grid_spacing @@ -455,8 +484,6 @@ classes: in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. range: ImagingPlane__grid_spacing - required: false - multivalued: false inlined: true reference_frame: name: reference_frame @@ -478,8 +505,6 @@ classes: axis (larger index = more rightward). 
Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." range: text - required: false - multivalued: false optical_channel: name: optical_channel description: An optical channel used to record from an imaging plane. @@ -495,7 +520,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: Device @@ -628,13 +652,11 @@ classes: description: Description or other notes about the channel. range: text required: true - multivalued: false emission_lambda: name: emission_lambda description: Emission wavelength for channel, in nm. range: float32 required: true - multivalued: false tree_root: true MotionCorrection: name: MotionCorrection @@ -665,7 +687,6 @@ classes: description: Image stack with frames shifted to the common coordinates. range: ImageSeries required: true - multivalued: false inlined: true inlined_as_list: false xy_translation: @@ -674,7 +695,6 @@ classes: coordinates, for example, to align each frame to a reference image. range: TimeSeries required: true - multivalued: false inlined: true inlined_as_list: false original: @@ -684,7 +704,6 @@ classes: tag: source_type value: link required: true - multivalued: false inlined: true any_of: - range: ImageSeries diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml index 8cc1810..a376d92 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml @@ -37,30 +37,24 @@ classes: description: Phase response to stimulus on the first measured axis. range: ImagingRetinotopy__axis_1_phase_map required: true - multivalued: false inlined: true axis_1_power_map: name: axis_1_power_map description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. 
range: ImagingRetinotopy__axis_1_power_map - required: false - multivalued: false inlined: true axis_2_phase_map: name: axis_2_phase_map description: Phase response to stimulus on the second measured axis. range: ImagingRetinotopy__axis_2_phase_map required: true - multivalued: false inlined: true axis_2_power_map: name: axis_2_power_map description: Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: ImagingRetinotopy__axis_2_power_map - required: false - multivalued: false inlined: true axis_descriptions: name: axis_descriptions @@ -79,16 +73,12 @@ classes: description: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].' range: ImagingRetinotopy__focal_depth_image - required: false - multivalued: false inlined: true sign_map: name: sign_map description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. range: ImagingRetinotopy__sign_map - required: false - multivalued: false inlined: true vasculature_image: name: vasculature_image @@ -96,7 +86,6 @@ classes: [rows][columns]' range: ImagingRetinotopy__vasculature_image required: true - multivalued: false inlined: true tree_root: true ImagingRetinotopy__axis_1_phase_map: diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml index 7e1a614..de0d90a 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. 
+ range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,11 +177,4 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - inlined: true tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml index f8adba6..b8e1134 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,13 +177,6 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. 
- range: VectorData - required: false - multivalued: true - inlined: true tree_root: true AlignedDynamicTable: name: AlignedDynamicTable diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml index 52b119d..513a5d4 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,13 +177,6 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - inlined: true tree_root: true AlignedDynamicTable: name: AlignedDynamicTable diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml index 85675e7..5613666 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. 
+ range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,13 +177,6 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - inlined: true tree_root: true AlignedDynamicTable: name: AlignedDynamicTable diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml index 9ffb97d..36dd411 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,13 +177,6 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. 
- range: VectorData - required: false - multivalued: true - inlined: true tree_root: true AlignedDynamicTable: name: AlignedDynamicTable diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml index 940f1b7..c7f3d0d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml @@ -116,6 +116,11 @@ classes: identifier: true range: string required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true table: name: table description: Reference to the DynamicTable object that this region applies @@ -123,11 +128,6 @@ classes: range: DynamicTable required: true inlined: true - description: - name: description - description: Description of what this table region points to. - range: text - required: true tree_root: true DynamicTable: name: DynamicTable @@ -177,13 +177,6 @@ classes: range: int required: true multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - inlined: true tree_root: true AlignedDynamicTable: name: AlignedDynamicTable diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index a962b8f..a8d955d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -31,21 +31,18 @@ classes: resources. 
range: ExternalResources__keys required: true - multivalued: false inlined: true entities: name: entities description: A table for mapping user terms (i.e., keys) to resource entities. range: ExternalResources__entities required: true - multivalued: false inlined: true resources: name: resources description: A table for mapping user terms (i.e., keys) to resource entities. range: ExternalResources__resources required: true - multivalued: false inlined: true objects: name: objects @@ -53,14 +50,12 @@ classes: to external resources. range: ExternalResources__objects required: true - multivalued: false inlined: true object_keys: name: object_keys description: A table for identifying which objects use which keys. range: ExternalResources__object_keys required: true - multivalued: false inlined: true tree_root: true ExternalResources__keys: @@ -84,7 +79,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__entities: name: ExternalResources__entities description: A table for mapping user terms (i.e., keys) to resource entities. @@ -104,7 +98,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false resources_idx: name: resources_idx description: The index into the 'resources' table @@ -112,7 +105,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false entity_id: name: entity_id description: The unique identifier entity. @@ -120,7 +112,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false entity_uri: name: entity_uri description: The URI for the entity this reference applies to. This can be @@ -129,7 +120,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__resources: name: ExternalResources__resources description: A table for mapping user terms (i.e., keys) to resource entities. 
@@ -149,7 +139,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false resource_uri: name: resource_uri description: The URI for the resource. This can be an empty string. @@ -157,7 +146,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__objects: name: ExternalResources__objects description: A table for identifying which objects in a file contain references @@ -178,7 +166,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false field: name: field description: The field of the object. This can be an empty string if the object @@ -187,7 +174,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__object_keys: name: ExternalResources__object_keys description: A table for identifying which objects use which keys. @@ -208,7 +194,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The index to the 'keys' table for the key. @@ -216,4 +201,3 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml index 89023ae..4aadb91 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml @@ -31,21 +31,18 @@ classes: resources. range: ExternalResources__keys required: true - multivalued: false inlined: true entities: name: entities description: A table for mapping user terms (i.e., keys) to resource entities. 
range: ExternalResources__entities required: true - multivalued: false inlined: true resources: name: resources description: A table for mapping user terms (i.e., keys) to resource entities. range: ExternalResources__resources required: true - multivalued: false inlined: true objects: name: objects @@ -53,14 +50,12 @@ classes: to external resources. range: ExternalResources__objects required: true - multivalued: false inlined: true object_keys: name: object_keys description: A table for identifying which objects use which keys. range: ExternalResources__object_keys required: true - multivalued: false inlined: true tree_root: true ExternalResources__keys: @@ -84,7 +79,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__entities: name: ExternalResources__entities description: A table for mapping user terms (i.e., keys) to resource entities. @@ -104,7 +98,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false resources_idx: name: resources_idx description: The index into the 'resources' table @@ -112,7 +105,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false entity_id: name: entity_id description: The unique identifier entity. @@ -120,7 +112,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false entity_uri: name: entity_uri description: The URI for the entity this reference applies to. This can be @@ -129,7 +120,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__resources: name: ExternalResources__resources description: A table for mapping user terms (i.e., keys) to resource entities. @@ -149,7 +139,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false resource_uri: name: resource_uri description: The URI for the resource. This can be an empty string. 
@@ -157,7 +146,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__objects: name: ExternalResources__objects description: A table for identifying which objects in a file contain references @@ -178,7 +166,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false relative_path: name: relative_path description: The relative path from the container with the object_id to the @@ -189,7 +176,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false field: name: field description: The field of the compound data type using an external resource. @@ -199,7 +185,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__object_keys: name: ExternalResources__object_keys description: A table for identifying which objects use which keys. @@ -220,7 +205,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The index to the 'keys' table for the key. @@ -228,4 +212,3 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml index d0909a2..52d014f 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml @@ -31,21 +31,18 @@ classes: resources. range: ExternalResources__keys required: true - multivalued: false inlined: true files: name: files description: A table for storing object ids of files used in external resources. 
range: ExternalResources__files required: true - multivalued: false inlined: true entities: name: entities description: A table for mapping user terms (i.e., keys) to resource entities. range: ExternalResources__entities required: true - multivalued: false inlined: true objects: name: objects @@ -53,14 +50,12 @@ classes: to external resources. range: ExternalResources__objects required: true - multivalued: false inlined: true object_keys: name: object_keys description: A table for identifying which objects use which keys. range: ExternalResources__object_keys required: true - multivalued: false inlined: true tree_root: true ExternalResources__keys: @@ -84,7 +79,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__files: name: ExternalResources__files description: A table for storing object ids of files used in external resources. @@ -105,7 +99,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__entities: name: ExternalResources__entities description: A table for mapping user terms (i.e., keys) to resource entities. @@ -125,7 +118,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false entity_id: name: entity_id description: The compact uniform resource identifier (CURIE) of the entity, @@ -134,7 +126,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false entity_uri: name: entity_uri description: The URI for the entity this reference applies to. This can be @@ -143,7 +134,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__objects: name: ExternalResources__objects description: A table for identifying which objects in a file contain references @@ -165,7 +155,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false object_id: name: object_id description: The object id (UUID) of the object. 
@@ -173,7 +162,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false object_type: name: object_type description: The data type of the object. @@ -181,7 +169,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false relative_path: name: relative_path description: The relative path from the data object with the `object_id` to @@ -192,7 +179,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false field: name: field description: The field within the compound data type using an external resource. @@ -202,7 +188,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__object_keys: name: ExternalResources__object_keys description: A table for identifying which objects use which keys. @@ -223,7 +208,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. @@ -231,4 +215,3 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml index 75f3938..bc36101 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml @@ -31,21 +31,18 @@ classes: resources. range: ExternalResources__keys required: true - multivalued: false inlined: true files: name: files description: A table for storing object ids of files used in external resources. range: ExternalResources__files required: true - multivalued: false inlined: true entities: name: entities description: A table for mapping user terms (i.e., keys) to resource entities. 
range: ExternalResources__entities required: true - multivalued: false inlined: true objects: name: objects @@ -53,21 +50,18 @@ classes: to external resources. range: ExternalResources__objects required: true - multivalued: false inlined: true object_keys: name: object_keys description: A table for identifying which objects use which keys. range: ExternalResources__object_keys required: true - multivalued: false inlined: true entity_keys: name: entity_keys description: A table for identifying which keys use which entity. range: ExternalResources__entity_keys required: true - multivalued: false inlined: true tree_root: true ExternalResources__keys: @@ -91,7 +85,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__files: name: ExternalResources__files description: A table for storing object ids of files used in external resources. @@ -112,7 +105,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__entities: name: ExternalResources__entities description: A table for mapping user terms (i.e., keys) to resource entities. @@ -133,7 +125,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false entity_uri: name: entity_uri description: The URI for the entity this reference applies to. This can be @@ -142,7 +133,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__objects: name: ExternalResources__objects description: A table for identifying which objects in a file contain references @@ -164,7 +154,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false object_id: name: object_id description: The object id (UUID) of the object. @@ -172,7 +161,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false object_type: name: object_type description: The data type of the object. 
@@ -180,7 +168,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false relative_path: name: relative_path description: The relative path from the data object with the `object_id` to @@ -191,7 +178,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false field: name: field description: The field within the compound data type using an external resource. @@ -201,7 +187,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false ExternalResources__object_keys: name: ExternalResources__object_keys description: A table for identifying which objects use which keys. @@ -222,7 +207,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. @@ -230,7 +214,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false ExternalResources__entity_keys: name: ExternalResources__entity_keys description: A table for identifying which keys use which entity. @@ -250,7 +233,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. @@ -258,4 +240,3 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml index dcaf960..6dfe7fe 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml @@ -30,21 +30,18 @@ classes: resources. 
range: HERD__keys required: true - multivalued: false inlined: true files: name: files description: A table for storing object ids of files used in external resources. range: HERD__files required: true - multivalued: false inlined: true entities: name: entities description: A table for mapping user terms (i.e., keys) to resource entities. range: HERD__entities required: true - multivalued: false inlined: true objects: name: objects @@ -52,21 +49,18 @@ classes: to external resources. range: HERD__objects required: true - multivalued: false inlined: true object_keys: name: object_keys description: A table for identifying which objects use which keys. range: HERD__object_keys required: true - multivalued: false inlined: true entity_keys: name: entity_keys description: A table for identifying which keys use which entity. range: HERD__entity_keys required: true - multivalued: false inlined: true tree_root: true HERD__keys: @@ -90,7 +84,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false HERD__files: name: HERD__files description: A table for storing object ids of files used in external resources. @@ -111,7 +104,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false HERD__entities: name: HERD__entities description: A table for mapping user terms (i.e., keys) to resource entities. @@ -132,7 +124,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false entity_uri: name: entity_uri description: The URI for the entity this reference applies to. This can be @@ -141,7 +132,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false HERD__objects: name: HERD__objects description: A table for identifying which objects in a file contain references @@ -163,7 +153,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false object_id: name: object_id description: The object id (UUID) of the object. 
@@ -171,7 +160,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false object_type: name: object_type description: The data type of the object. @@ -179,7 +167,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false relative_path: name: relative_path description: The relative path from the data object with the `object_id` to @@ -190,7 +177,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false field: name: field description: The field within the compound data type using an external resource. @@ -200,7 +186,6 @@ classes: exact_number_dimensions: 1 range: text required: true - multivalued: false HERD__object_keys: name: HERD__object_keys description: A table for identifying which objects use which keys. @@ -221,7 +206,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. @@ -229,7 +213,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false HERD__entity_keys: name: HERD__entity_keys description: A table for identifying which keys use which entity. @@ -249,7 +232,6 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. 
@@ -257,4 +239,3 @@ classes: exact_number_dimensions: 1 range: uint required: true - multivalued: false From 734088f18e72ffc2746de9206786a324cd0a798a Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 19 Sep 2024 19:28:17 -0700 Subject: [PATCH 07/18] remove commented out pdb call, add more inline docs to rolldown --- nwb_linkml/src/nwb_linkml/adapters/namespaces.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 70e6f89..1db8bbb 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -178,6 +178,11 @@ class NamespacesAdapter(Adapter): nwb-schema-language inheritance doesn't work like normal python inheritance - instead of inheriting everything at the 'top level' of a class, it also recursively merges all properties from the parent objects. + + While this operation does not take care to modify classes in a way that respect their order + (i.e. roll down ancestor classes first, in order, before the leaf classes), + it doesn't matter - this method should be both idempotent and order insensitive + for a given source schema. 
References: https://github.com/NeurodataWithoutBorders/pynwb/issues/1954 @@ -191,11 +196,9 @@ class NamespacesAdapter(Adapter): # merge and cast new_cls: dict = {} for i, parent in enumerate(parents): - # if parent.neurodata_type_def == "PatchClampSeries": - # pdb.set_trace() - complete = True - if i == len(parents) - 1: - complete = False + # we want a full roll-down of all the ancestor classes, + # but we make an abbreviated leaf class + complete = False if i == len(parents) - 1 else True new_cls = roll_down_nwb_class(new_cls, parent, complete=complete) new_cls: Group | Dataset = type(cls)(**new_cls) new_cls.parent = cls.parent From 8993014832a9b9e2cd2a5e9641b395f5663996b6 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 19 Sep 2024 22:43:29 -0700 Subject: [PATCH 08/18] make the tests pass again - add validator to do the opposite of coerce_value, try to pass the input as the ``value`` field of the model. fix test assumptions and model creation. model update to follow --- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 2 -- .../src/nwb_linkml/adapters/namespaces.py | 6 ++--- .../src/nwb_linkml/generators/pydantic.py | 2 ++ nwb_linkml/src/nwb_linkml/includes/base.py | 26 +++++++++++++++++-- .../tests/test_adapters/test_adapter.py | 2 +- .../test_adapters/test_adapter_namespaces.py | 8 +++--- nwb_linkml/tests/test_includes/conftest.py | 4 +-- 7 files changed, 37 insertions(+), 13 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 7b391de..0558862 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -59,9 +59,7 @@ class MapScalar(DatasetMap): slots: - name: MyScalar description: A scalar - multivalued: false range: int32 - required: false """ diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 1db8bbb..6aa68ad 100644 --- 
a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -178,7 +178,7 @@ class NamespacesAdapter(Adapter): nwb-schema-language inheritance doesn't work like normal python inheritance - instead of inheriting everything at the 'top level' of a class, it also recursively merges all properties from the parent objects. - + While this operation does not take care to modify classes in a way that respect their order (i.e. roll down ancestor classes first, in order, before the leaf classes), it doesn't matter - this method should be both idempotent and order insensitive @@ -196,8 +196,8 @@ class NamespacesAdapter(Adapter): # merge and cast new_cls: dict = {} for i, parent in enumerate(parents): - # we want a full roll-down of all the ancestor classes, - # but we make an abbreviated leaf class + # we want a full roll-down of all the ancestor classes, + # but we make an abbreviated leaf class complete = False if i == len(parents) - 1 else True new_cls = roll_down_nwb_class(new_cls, parent, complete=complete) new_cls: Group | Dataset = type(cls)(**new_cls) diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 927e9c2..f4c1c9e 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -26,6 +26,7 @@ from linkml_runtime.utils.formatutils import remove_empty_items from linkml_runtime.utils.schemaview import SchemaView from nwb_linkml.includes.base import ( + BASEMODEL_CAST_WITH_VALUE, BASEMODEL_COERCE_CHILD, BASEMODEL_COERCE_VALUE, BASEMODEL_GETITEM, @@ -55,6 +56,7 @@ class NWBPydanticGenerator(PydanticGenerator): 'object_id: Optional[str] = Field(None, description="Unique UUID for each object")', BASEMODEL_GETITEM, BASEMODEL_COERCE_VALUE, + BASEMODEL_CAST_WITH_VALUE, BASEMODEL_COERCE_CHILD, ) split: bool = True diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py 
b/nwb_linkml/src/nwb_linkml/includes/base.py index 3ecae8c..75b5ca6 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -16,7 +16,7 @@ BASEMODEL_GETITEM = """ BASEMODEL_COERCE_VALUE = """ @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: \"\"\"Try to rescue instantiation by using the value field\"\"\" try: return handler(v) @@ -27,7 +27,29 @@ BASEMODEL_COERCE_VALUE = """ try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}" + ) from e1 +""" + +BASEMODEL_CAST_WITH_VALUE = """ + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + \"\"\"Try to rescue instantiation by casting into the model's value fiel\"\"\" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}" + ) from e1 """ BASEMODEL_COERCE_CHILD = """ diff --git a/nwb_linkml/tests/test_adapters/test_adapter.py b/nwb_linkml/tests/test_adapters/test_adapter.py index 4514f5d..b3fdb27 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter.py +++ b/nwb_linkml/tests/test_adapters/test_adapter.py @@ -54,7 +54,7 @@ def test_walk_field_values(nwb_core_fixture): text_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, "dtype", value="text")) assert all([d.dtype == "text" for d in text_models]) # 135 known value from regex search - assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 135 + 
assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 155 def test_build_result(linkml_schema_bare): diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py index 2052778..4c8de11 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py @@ -135,8 +135,9 @@ def test_roll_down_inheritance(): child = child_ns_adapter.get("Child") # overrides simple attrs assert child.doc == "child" - # gets unassigned parent attrs - assert "b" in [attr.name for attr in child.attributes] + # we don't receive attrs that aren't overridden in the child, + # instead we let python/linkml inheritance handle that for us + assert "b" not in [attr.name for attr in child.attributes] # overrides values while preserving remaining values when set attr_a = [attr for attr in child.attributes if attr.name == "a"][0] assert attr_a.value == "z" @@ -146,7 +147,8 @@ def test_roll_down_inheritance(): # preserve unset values in child datasets assert child.datasets[0].dtype == parent_cls.datasets[0].dtype assert child.datasets[0].dims == parent_cls.datasets[0].dims - # gets undeclared attrs in child datasets + # we *do* get undeclared attrs in child datasets, + # since those are not handled by python/linkml inheritance assert "d" in [attr.name for attr in child.datasets[0].attributes] # overrides set values in child datasets while preserving unset c_attr = [attr for attr in child.datasets[0].attributes if attr.name == "c"][0] diff --git a/nwb_linkml/tests/test_includes/conftest.py b/nwb_linkml/tests/test_includes/conftest.py index 53e3a39..1a801ae 100644 --- a/nwb_linkml/tests/test_includes/conftest.py +++ b/nwb_linkml/tests/test_includes/conftest.py @@ -114,14 +114,14 @@ def _icephys_stimulus_and_response( n_samples = generator.integers(20, 50) stimulus = VoltageClampStimulusSeries( name=f"vcss_{i}", - 
data=VoltageClampStimulusSeriesData(value=[i] * n_samples), + data=VoltageClampStimulusSeriesData(value=np.array([i] * n_samples, dtype=float)), stimulus_description=f"{i}", sweep_number=i, electrode=electrode, ) response = VoltageClampSeries( name=f"vcs_{i}", - data=VoltageClampSeriesData(value=[i] * n_samples), + data=VoltageClampSeriesData(value=np.array([i] * n_samples, dtype=float)), stimulus_description=f"{i}", electrode=electrode, ) From fc6f60ad6147b587f6ad210925d12a57ae2e7a47 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 25 Sep 2024 20:41:53 -0700 Subject: [PATCH 09/18] fix hdmf inheritance during testing, error handling --- nwb_linkml/pdm.lock | 28 ++++---- nwb_linkml/pyproject.toml | 2 +- .../src/nwb_linkml/generators/pydantic.py | 14 ++-- nwb_linkml/src/nwb_linkml/includes/base.py | 6 +- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 61 ++++++++++++---- nwb_linkml/src/nwb_linkml/lang_elements.py | 3 +- nwb_linkml/src/nwb_linkml/maps/__init__.py | 3 +- nwb_linkml/src/nwb_linkml/maps/dtype.py | 6 ++ nwb_linkml/tests/test_includes/test_hdmf.py | 72 +++++++++++-------- 9 files changed, 129 insertions(+), 66 deletions(-) diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index f6f2c7c..36e3896 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "plot", "tests"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:1c297e11f6dc9e4f6b8d29df872177d2ce65bbd334c0b65aa5175dfb125c4d9f" +content_hash = "sha256:14dd3d0b396dc25e554b924825664346d2644f265e48346180f1cfdf833a8c92" [[metadata.targets]] requires_python = ">=3.10,<3.13" @@ -1038,9 +1038,9 @@ files = [ [[package]] name = "numpydantic" -version = "1.3.3" +version = "1.6.0" requires_python = "<4.0,>=3.9" -summary = "Type and shape validation and serialization for numpy arrays in pydantic models" +summary = "Type and shape validation and serialization for arbitrary array types in pydantic models" groups = ["default"] 
dependencies = [ "numpy>=1.24.0", @@ -1048,13 +1048,13 @@ dependencies = [ "typing-extensions>=4.11.0; python_version < \"3.11\"", ] files = [ - {file = "numpydantic-1.3.3-py3-none-any.whl", hash = "sha256:e002767252b1b77abb7715834ab7cbf58964baddae44863710f09e71b23287e4"}, - {file = "numpydantic-1.3.3.tar.gz", hash = "sha256:1cc2744f7b5fbcecd51a64fafaf8c9a564bb296336a566a16be97ba7b1c28698"}, + {file = "numpydantic-1.6.0-py3-none-any.whl", hash = "sha256:72f3ef0bc8a5801bac6fb79920467d763d51cddec8476875efeb5064c11c04cf"}, + {file = "numpydantic-1.6.0.tar.gz", hash = "sha256:9785ba7eb5489b9e5438109e9b2dcd1cc0aa87d1b6b5df71fb906dc0708df83c"}, ] [[package]] name = "nwb-models" -version = "0.1.0" +version = "0.2.0" requires_python = ">=3.10" summary = "Pydantic/LinkML models for Neurodata Without Borders" groups = ["default"] @@ -1064,23 +1064,23 @@ dependencies = [ "pydantic>=2.3.0", ] files = [ - {file = "nwb_models-0.1.0-py3-none-any.whl", hash = "sha256:d485422865f6762586e8f8389d67bce17a3e66d07f6273385a751145afbbbfea"}, - {file = "nwb_models-0.1.0.tar.gz", hash = "sha256:3c3ccfc6c2ac03dffe26ba7f180aecc650d6593c05d4f306f84b90fabc3ff2b8"}, + {file = "nwb_models-0.2.0-py3-none-any.whl", hash = "sha256:72bb8a8879261488071d4e8eff35f2cbb20c44ac4bb7f67806c6329b4f8b2068"}, + {file = "nwb_models-0.2.0.tar.gz", hash = "sha256:7e7f280378c668e1695dd9d53b32073d85615e90fee0ec417888dd83bdb9cbb3"}, ] [[package]] name = "nwb-schema-language" -version = "0.1.3" -requires_python = ">=3.9,<4.0" +version = "0.2.0" +requires_python = "<3.13,>=3.10" summary = "Translation of the nwb-schema-language to LinkML" groups = ["default"] dependencies = [ - "linkml-runtime<2.0.0,>=1.1.24", - "pydantic<3.0.0,>=2.3.0", + "linkml-runtime>=1.7.7", + "pydantic>=2.3.0", ] files = [ - {file = "nwb_schema_language-0.1.3-py3-none-any.whl", hash = "sha256:2eb86aac6614d490f7ec3fa68634bb9dceb3834d9820f5afc5645a9f3b0c3401"}, - {file = "nwb_schema_language-0.1.3.tar.gz", hash = 
"sha256:ad290e2896a9cde7e2f353bc3b8ddf42be865238d991167d397ff2e0d03c88ba"}, + {file = "nwb_schema_language-0.2.0-py3-none-any.whl", hash = "sha256:354afb0abfbc61a6d6b227695b9a4312df5030f2746b517fc5849ac085c8e5f2"}, + {file = "nwb_schema_language-0.2.0.tar.gz", hash = "sha256:59beda56ea52a55f4514d7e4b73e30ceaee1c60b7ddf4fc80afd48777acf9e50"}, ] [[package]] diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index edf3579..2670310 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "pydantic-settings>=2.0.3", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', - "numpydantic>=1.5.0", + "numpydantic>=1.6.0", "black>=24.4.2", "pandas>=2.2.2", "networkx>=3.3", diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index f4c1c9e..336bbf8 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -9,7 +9,7 @@ import re from dataclasses import dataclass, field from pathlib import Path from types import ModuleType -from typing import Callable, ClassVar, Dict, List, Literal, Optional, Tuple +from typing import Callable, ClassVar, Dict, List, Optional, Tuple from linkml.generators import PydanticGenerator from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray @@ -72,7 +72,7 @@ class NWBPydanticGenerator(PydanticGenerator): emit_metadata: bool = True gen_classvars: bool = True gen_slots: bool = True - extra_fields: Literal["allow", "forbid", "ignore"] = "allow" + # extra_fields: Literal["allow", "forbid", "ignore"] = "allow" skip_meta: ClassVar[Tuple[str]] = ("domain_of", "alias") @@ -269,7 +269,7 @@ class AfterGenerateClass: """ if cls.cls.name == "DynamicTable": - cls.cls.bases = ["DynamicTableMixin", "ConfiguredBaseModel"] + cls.cls.bases = ["DynamicTableMixin"] if ( cls.injected_classes is None @@ -287,18 +287,18 @@ class AfterGenerateClass: 
else: # pragma: no cover - for completeness, shouldn't happen cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy() elif cls.cls.name == "VectorData": - cls.cls.bases = ["VectorDataMixin", "ConfiguredBaseModel"] + cls.cls.bases = ["VectorDataMixin"] # make ``value`` generic on T if "value" in cls.cls.attributes: cls.cls.attributes["value"].range = "Optional[T]" elif cls.cls.name == "VectorIndex": - cls.cls.bases = ["VectorIndexMixin", "ConfiguredBaseModel"] + cls.cls.bases = ["VectorIndexMixin"] elif cls.cls.name == "DynamicTableRegion": - cls.cls.bases = ["DynamicTableRegionMixin", "VectorData", "ConfiguredBaseModel"] + cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"] elif cls.cls.name == "AlignedDynamicTable": cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"] elif cls.cls.name == "ElementIdentifiers": - cls.cls.bases = ["ElementIdentifiersMixin", "Data", "ConfiguredBaseModel"] + cls.cls.bases = ["ElementIdentifiersMixin", "Data"] # make ``value`` generic on T if "value" in cls.cls.attributes: cls.cls.attributes["value"].range = "Optional[T]" diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py index 75b5ca6..d77a759 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -30,7 +30,8 @@ BASEMODEL_COERCE_VALUE = """ raise ValueError( f"coerce_value: Could not use the value field of {type(v)} " f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}" + f"expected type: {cls.model_fields[info.field_name].annotation}\\n" + f"inner error: {str(e1)}" ) from e1 """ @@ -48,7 +49,8 @@ BASEMODEL_CAST_WITH_VALUE = """ raise ValueError( f"cast_with_value: Could not cast {type(v)} as value field for " f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}" + f" expected_type: {cls.model_fields[info.field_name].annotation}\\n" + f"inner error: {str(e1)}" ) 
from e1 """ diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 7a7d294..df73d68 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -39,8 +39,30 @@ if TYPE_CHECKING: # pragma: no cover T = TypeVar("T", bound=NDArray) T_INJECT = 'T = TypeVar("T", bound=NDArray)' +if "pytest" in sys.modules: + from nwb_models.models import ConfiguredBaseModel +else: -class DynamicTableMixin(BaseModel): + class ConfiguredBaseModel(BaseModel): + """ + Dummy ConfiguredBaseModel (without its methods from :mod:`.includes.base` ) + used so that the injected mixins inherit from the `ConfiguredBaseModel` + and we get a linear inheritance MRO (rather than needing to inherit + from the mixins *and* the configured base model) so that the + model_config is correctly resolved (ie. to allow extra args) + """ + + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + + +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -295,13 +317,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -364,18 +392,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by 
insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -426,7 +457,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -518,7 +549,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -574,7 +605,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -927,12 +958,18 @@ if "pytest" in sys.modules: class VectorData(VectorDataMixin): """VectorData subclass for testing""" - pass + name: str = Field(...) + description: str = Field( + ..., description="""Description of what these vectors represent.""" + ) class VectorIndex(VectorIndexMixin): """VectorIndex subclass for testing""" - pass + name: str = Field(...) 
+ description: str = Field( + ..., description="""Description of what these vectors represent.""" + ) class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """DynamicTableRegion subclass for testing""" diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py index c199062..fdde634 100644 --- a/nwb_linkml/src/nwb_linkml/lang_elements.py +++ b/nwb_linkml/src/nwb_linkml/lang_elements.py @@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import ( TypeDefinition, ) -from nwb_linkml.maps import flat_to_linkml +from nwb_linkml.maps import flat_to_linkml, linkml_reprs def _make_dtypes() -> List[TypeDefinition]: @@ -36,6 +36,7 @@ def _make_dtypes() -> List[TypeDefinition]: name=nwbtype, minimum_value=amin, typeof=linkmltype, # repr=repr_string + repr=linkml_reprs.get(nwbtype, None), ) DTypeTypes.append(atype) return DTypeTypes diff --git a/nwb_linkml/src/nwb_linkml/maps/__init__.py b/nwb_linkml/src/nwb_linkml/maps/__init__.py index 8b01447..cdad7d0 100644 --- a/nwb_linkml/src/nwb_linkml/maps/__init__.py +++ b/nwb_linkml/src/nwb_linkml/maps/__init__.py @@ -2,7 +2,7 @@ Mapping from one domain to another """ -from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np +from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np, linkml_reprs from nwb_linkml.maps.map import Map from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC from nwb_linkml.maps.quantity import QUANTITY_MAP @@ -14,4 +14,5 @@ __all__ = [ "Map", "flat_to_linkml", "flat_to_np", + "linkml_reprs", ] diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 6d944dd..95cb296 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -39,6 +39,12 @@ flat_to_linkml = { Map between the flat data types and the simpler linkml base types """ +linkml_reprs = {"numeric": "float | int"} +""" +``repr`` fields used in the nwb language elements injected in 
every namespace +that give the nwb type a specific representation in the generated pydantic models +""" + flat_to_np = { "float": float, "float32": np.float32, diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index a8b14b7..349a93f 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -149,8 +149,8 @@ def test_dynamictable_mixin_colnames_index(): cols = { "existing_col": np.arange(10), - "new_col_1": hdmf.VectorData(value=np.arange(10)), - "new_col_2": hdmf.VectorData(value=np.arange(10)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)), + "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)), } # explicit index with mismatching name cols["weirdname_index"] = VectorIndexMixin(value=np.arange(10), target=cols["new_col_1"]) @@ -171,9 +171,9 @@ def test_dynamictable_mixin_colnames_ordered(): cols = { "existing_col": np.arange(10), - "new_col_1": hdmf.VectorData(value=np.arange(10)), - "new_col_2": hdmf.VectorData(value=np.arange(10)), - "new_col_3": hdmf.VectorData(value=np.arange(10)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)), + "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)), + "new_col_3": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)), } order = ["new_col_2", "existing_col", "new_col_1", "new_col_3"] @@ -198,7 +198,7 @@ def test_dynamictable_mixin_getattr(): class MyDT(DynamicTableMixin): existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] - col = hdmf.VectorData(value=np.arange(10)) + col = hdmf.VectorData(name="existing_col", description="", value=np.arange(10)) inst = MyDT(existing_col=col) # regular lookup for attrs that exist @@ -257,13 +257,17 @@ def test_dynamictable_resolve_index(): cols = { "existing_col": np.arange(10), - "new_col_1": 
hdmf.VectorData(value=np.arange(10)), - "new_col_2": hdmf.VectorData(value=np.arange(10)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)), + "new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)), } # explicit index with mismatching name - cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"]) + cols["weirdname_index"] = hdmf.VectorIndex( + name="weirdname_index", description="", value=np.arange(10), target=cols["new_col_1"] + ) # implicit index with matching name - cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10)) + cols["new_col_2_index"] = hdmf.VectorIndex( + name="new_col_2_index", description="", value=np.arange(10) + ) inst = MyDT(**cols) assert inst.weirdname_index.target is inst.new_col_1 @@ -282,14 +286,14 @@ def test_dynamictable_assert_equal_length(): cols = { "existing_col": np.arange(10), - "new_col_1": hdmf.VectorData(value=np.arange(11)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(11)), } with pytest.raises(ValidationError, match="columns are not of equal length"): _ = MyDT(**cols) cols = { "existing_col": np.arange(11), - "new_col_1": hdmf.VectorData(value=np.arange(10)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)), } with pytest.raises(ValidationError, match="columns are not of equal length"): _ = MyDT(**cols) @@ -297,16 +301,20 @@ def test_dynamictable_assert_equal_length(): # wrong lengths are fine as long as the index is good cols = { "existing_col": np.arange(10), - "new_col_1": hdmf.VectorData(value=np.arange(100)), - "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 10) + 10), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)), + "new_col_1_index": hdmf.VectorIndex( + name="new_col_1_index", description="", value=np.arange(0, 100, 10) + 10 + ), } _ = MyDT(**cols) # but not fine if the index is not good 
cols = { "existing_col": np.arange(10), - "new_col_1": hdmf.VectorData(value=np.arange(100)), - "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)), + "new_col_1_index": hdmf.VectorIndex( + name="new_col_1_index", description="", value=np.arange(0, 100, 5) + 5 + ), } with pytest.raises(ValidationError, match="columns are not of equal length"): _ = MyDT(**cols) @@ -321,8 +329,8 @@ def test_dynamictable_setattr(): existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] cols = { - "existing_col": hdmf.VectorData(value=np.arange(10)), - "new_col_1": hdmf.VectorData(value=np.arange(10)), + "existing_col": hdmf.VectorData(name="existing_col", description="", value=np.arange(10)), + "new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)), } inst = MyDT(existing_col=cols["existing_col"]) assert inst.colnames == ["existing_col"] @@ -335,7 +343,7 @@ def test_dynamictable_setattr(): # model validators should be called to ensure equal length with pytest.raises(ValidationError): - inst.new_col_2 = hdmf.VectorData(value=np.arange(11)) + inst.new_col_2 = hdmf.VectorData(name="new_col_2", description="", value=np.arange(11)) def test_vectordata_indexing(): @@ -346,7 +354,7 @@ def test_vectordata_indexing(): value_array, index_array = _ragged_array(n_rows) value_array = np.concatenate(value_array) - data = hdmf.VectorData(value=value_array) + data = hdmf.VectorData(name="data", description="", value=value_array) # before we have an index, things should work as normal, indexing a 1D array assert data[0] == 0 @@ -356,7 +364,7 @@ def test_vectordata_indexing(): data[0] = 0 # indexes by themselves are the same - index_notarget = hdmf.VectorIndex(value=index_array) + index_notarget = hdmf.VectorIndex(name="no_target_index", description="", value=index_array) assert index_notarget[0] == index_array[0] assert all(index_notarget[0:3] == index_array[0:3]) 
oldval = index_array[0] @@ -364,7 +372,7 @@ def test_vectordata_indexing(): assert index_notarget[0] == 5 index_notarget[0] = oldval - index = hdmf.VectorIndex(value=index_array, target=data) + index = hdmf.VectorIndex(name="data_index", description="", value=index_array, target=data) data._index = index # after an index, both objects should index raggedly @@ -396,8 +404,10 @@ def test_vectordata_getattr(): """ VectorData and VectorIndex both forward getattr to ``value`` """ - data = hdmf.VectorData(value=np.arange(100)) - index = hdmf.VectorIndex(value=np.arange(10, 101, 10), target=data) + data = hdmf.VectorData(name="data", description="", value=np.arange(100)) + index = hdmf.VectorIndex( + name="data_index", description="", value=np.arange(10, 101, 10), target=data + ) # get attrs that we defined on the models # i.e. no attribute errors here @@ -447,7 +457,9 @@ def test_dynamictable_region_indexing(basic_table): index = np.array([9, 4, 8, 3, 7, 2, 6, 1, 5, 0]) - table_region = hdmf.DynamicTableRegion(value=index, table=inst) + table_region = hdmf.DynamicTableRegion( + name="table_region", description="", value=index, table=inst + ) row = table_region[1] assert all(row.iloc[0] == index[1]) @@ -499,10 +511,14 @@ def test_dynamictable_region_ragged(): timeseries_index=spike_idx, ) region = hdmf.DynamicTableRegion( + name="region", + description="a table region what else would it be", table=table, value=value, ) - index = hdmf.VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx) + index = hdmf.VectorIndex( + name="region_index", description="hgggggggjjjj", target=region, value=idx + ) region._index = index rows = region[1] @@ -594,8 +610,8 @@ def test_mixed_aligned_dynamictable(aligned_table): value_array, index_array = _ragged_array(10) value_array = np.concatenate(value_array) - data = hdmf.VectorData(value=value_array) - index = hdmf.VectorIndex(value=index_array) + data = hdmf.VectorData(name="data", description="", value=value_array) 
+ index = hdmf.VectorIndex(name="data_index", description="", value=index_array) atable = AlignedTable(**cols, extra_col=data, extra_col_index=index) atable[0] From 911a3ddb61b77539c857f08fda7a74ee2a33621c Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 25 Sep 2024 21:18:09 -0700 Subject: [PATCH 10/18] cast to value in container classes --- docs/meta/todo.md | 3 +++ nwb_linkml/src/nwb_linkml/adapters/group.py | 2 +- nwb_linkml/src/nwb_linkml/includes/base.py | 5 ++++- nwb_linkml/src/nwb_linkml/io/hdf5.py | 21 +++++++++++++++++++-- 4 files changed, 27 insertions(+), 4 deletions(-) diff --git a/docs/meta/todo.md b/docs/meta/todo.md index dd9f750..9199d22 100644 --- a/docs/meta/todo.md +++ b/docs/meta/todo.md @@ -53,6 +53,9 @@ Loading - [ ] Top-level containers are still a little janky, eg. how `ProcessingModule` just accepts extra args rather than properly abstracting `value` as a `__getitem__(self, key) -> T:` +Changes to linkml +- [ ] Allow parameterizing "extra" fields, so we don't have to stuff things into `value` dicts + ## Docs TODOs ```{todolist} diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py index f0e44ea..fb919d0 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/group.py +++ b/nwb_linkml/src/nwb_linkml/adapters/group.py @@ -129,7 +129,7 @@ class GroupAdapter(ClassAdapter): # We are a top-level container class like ProcessingModule base = self.build_base() # remove all the attributes and replace with child slot - base.classes[0].attributes = [slot] + base.classes[0].attributes.append(slot) return base def handle_container_slot(self, cls: Group) -> BuildResult: diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py index d77a759..2a6e84d 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -65,7 +65,10 @@ BASEMODEL_COERCE_CHILD = """ annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py index bf4fbe6..1691a46 100644 --- a/nwb_linkml/src/nwb_linkml/io/hdf5.py +++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py @@ -35,7 +35,7 @@ import h5py import networkx as nx import numpy as np from numpydantic.interface.hdf5 import H5ArrayPath -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError from tqdm import tqdm from nwb_linkml.maps.hdf5 import ( @@ -167,7 +167,24 @@ def _load_node( if "neurodata_type" in obj.attrs: model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"]) - return model(**args) + try: + return model(**args) + except ValidationError as e1: + # try to restack extra fields into ``value`` + if "value" in model.model_fields: + value_dict = { + key: val for key, val in args.items() if key not in model.model_fields + } + for k in value_dict: + del args[k] + args["value"] = value_dict + try: + return model(**args) + except Exception as e2: + raise e2 from e1 + else: + raise e1 + else: if "name" in args: del args["name"] From 886d3db8604427d67bd0bfcc8fc4802c80fb2dbf Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 25 Sep 2024 22:58:02 -0700 Subject: [PATCH 11/18] model checkpoint before fixing group generation --- nwb_linkml/src/nwb_linkml/includes/base.py | 14 +- .../pydantic/core/v2_2_0/core_nwb_base.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_device.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_file.py | 19 ++- 
.../pydantic/core/v2_2_0/core_nwb_icephys.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_image.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_misc.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 19 ++- .../pydantic/core/v2_2_0/core_nwb_ophys.py | 19 ++- .../core/v2_2_0/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_2_0/namespace.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_base.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_device.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_file.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_image.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_misc.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 19 ++- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 19 ++- .../core/v2_2_1/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_2_1/namespace.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_base.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_device.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_file.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_image.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_misc.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_ogen.py | 19 ++- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 19 ++- .../core/v2_2_2/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_2_2/namespace.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_base.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_device.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 19 ++- 
.../pydantic/core/v2_2_4/core_nwb_file.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_image.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_misc.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 19 ++- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 19 ++- .../core/v2_2_4/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_2_4/namespace.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_base.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_device.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_file.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_image.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_misc.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 19 ++- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 19 ++- .../core/v2_2_5/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_2_5/namespace.py | 19 ++- .../pydantic/core/v2_3_0/core_nwb_base.py | 30 +++- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 55 ++++++-- .../pydantic/core/v2_3_0/core_nwb_device.py | 19 ++- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 39 +++-- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_3_0/core_nwb_file.py | 19 ++- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 29 +++- .../pydantic/core/v2_3_0/core_nwb_image.py | 33 +++-- .../pydantic/core/v2_3_0/core_nwb_misc.py | 49 ++++--- .../pydantic/core/v2_3_0/core_nwb_ogen.py | 21 ++- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 38 ++++- .../core/v2_3_0/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_3_0/namespace.py | 19 ++- .../pydantic/core/v2_4_0/core_nwb_base.py | 32 ++++- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 55 ++++++-- .../pydantic/core/v2_4_0/core_nwb_device.py | 19 ++- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 
39 +++-- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_4_0/core_nwb_file.py | 19 ++- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 33 ++++- .../pydantic/core/v2_4_0/core_nwb_image.py | 33 +++-- .../pydantic/core/v2_4_0/core_nwb_misc.py | 49 ++++--- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 21 ++- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 38 ++++- .../core/v2_4_0/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_4_0/namespace.py | 19 ++- .../pydantic/core/v2_5_0/core_nwb_base.py | 32 ++++- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 59 ++++++-- .../pydantic/core/v2_5_0/core_nwb_device.py | 19 ++- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 39 +++-- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_5_0/core_nwb_file.py | 19 ++- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 33 ++++- .../pydantic/core/v2_5_0/core_nwb_image.py | 33 +++-- .../pydantic/core/v2_5_0/core_nwb_misc.py | 49 ++++--- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 21 ++- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 38 ++++- .../core/v2_5_0/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_5_0/namespace.py | 19 ++- .../core/v2_6_0_alpha/core_nwb_base.py | 32 ++++- .../core/v2_6_0_alpha/core_nwb_behavior.py | 59 ++++++-- .../core/v2_6_0_alpha/core_nwb_device.py | 19 ++- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 39 +++-- .../core/v2_6_0_alpha/core_nwb_epoch.py | 19 ++- .../core/v2_6_0_alpha/core_nwb_file.py | 19 ++- .../core/v2_6_0_alpha/core_nwb_icephys.py | 33 ++++- .../core/v2_6_0_alpha/core_nwb_image.py | 33 +++-- .../core/v2_6_0_alpha/core_nwb_misc.py | 49 ++++--- .../core/v2_6_0_alpha/core_nwb_ogen.py | 21 ++- .../core/v2_6_0_alpha/core_nwb_ophys.py | 38 ++++- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 19 ++- .../pydantic/core/v2_6_0_alpha/namespace.py | 19 ++- .../pydantic/core/v2_7_0/core_nwb_base.py | 32 ++++- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 59 ++++++-- 
.../pydantic/core/v2_7_0/core_nwb_device.py | 19 ++- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 39 +++-- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 19 ++- .../pydantic/core/v2_7_0/core_nwb_file.py | 19 ++- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 33 ++++- .../pydantic/core/v2_7_0/core_nwb_image.py | 33 +++-- .../pydantic/core/v2_7_0/core_nwb_misc.py | 49 ++++--- .../pydantic/core/v2_7_0/core_nwb_ogen.py | 22 ++- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 38 ++++- .../core/v2_7_0/core_nwb_retinotopy.py | 19 ++- .../models/pydantic/core/v2_7_0/namespace.py | 19 ++- .../models/pydantic/hdmf_common/__init__.py | 1 + .../hdmf_common/v1_1_0/hdmf_common_sparse.py | 21 ++- .../hdmf_common/v1_1_0/hdmf_common_table.py | 60 +++++--- .../pydantic/hdmf_common/v1_1_0/namespace.py | 21 ++- .../hdmf_common/v1_1_2/hdmf_common_sparse.py | 21 ++- .../hdmf_common/v1_1_2/hdmf_common_table.py | 60 +++++--- .../pydantic/hdmf_common/v1_1_2/namespace.py | 21 ++- .../hdmf_common/v1_1_3/hdmf_common_sparse.py | 21 ++- .../hdmf_common/v1_1_3/hdmf_common_table.py | 60 +++++--- .../pydantic/hdmf_common/v1_1_3/namespace.py | 21 ++- .../hdmf_common/v1_2_0/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_2_0/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_2_0/namespace.py | 28 +++- .../hdmf_common/v1_2_1/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_2_1/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_2_1/namespace.py | 28 +++- .../hdmf_common/v1_3_0/hdmf_common_base.py | 28 +++- .../v1_3_0/hdmf_common_resources.py | 28 +++- .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_3_0/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_3_0/namespace.py | 28 +++- .../hdmf_common/v1_4_0/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_4_0/hdmf_common_table.py | 51 
+++++-- .../pydantic/hdmf_common/v1_4_0/namespace.py | 28 +++- .../hdmf_common/v1_5_0/hdmf_common_base.py | 23 ++- .../hdmf_common/v1_5_0/hdmf_common_sparse.py | 21 ++- .../hdmf_common/v1_5_0/hdmf_common_table.py | 66 ++++++--- .../pydantic/hdmf_common/v1_5_0/namespace.py | 21 ++- .../hdmf_common/v1_5_1/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_5_1/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_5_1/namespace.py | 28 +++- .../hdmf_common/v1_6_0/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_6_0/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_6_0/namespace.py | 28 +++- .../hdmf_common/v1_7_0/hdmf_common_base.py | 28 +++- .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 28 +++- .../hdmf_common/v1_7_0/hdmf_common_table.py | 51 +++++-- .../pydantic/hdmf_common/v1_7_0/namespace.py | 28 +++- .../hdmf_common/v1_8_0/hdmf_common_base.py | 23 ++- .../hdmf_common/v1_8_0/hdmf_common_sparse.py | 21 ++- .../hdmf_common/v1_8_0/hdmf_common_table.py | 66 ++++++--- .../pydantic/hdmf_common/v1_8_0/namespace.py | 21 ++- .../v0_1_0/hdmf_experimental_experimental.py | 25 +++- .../v0_1_0/hdmf_experimental_resources.py | 25 +++- .../hdmf_experimental/v0_1_0/namespace.py | 28 +++- .../v0_2_0/hdmf_experimental_experimental.py | 28 +++- .../v0_2_0/hdmf_experimental_resources.py | 28 +++- .../hdmf_experimental/v0_2_0/namespace.py | 28 +++- .../v0_3_0/hdmf_experimental_experimental.py | 28 +++- .../v0_3_0/hdmf_experimental_resources.py | 28 +++- .../hdmf_experimental/v0_3_0/namespace.py | 28 +++- .../v0_4_0/hdmf_experimental_experimental.py | 28 +++- .../v0_4_0/hdmf_experimental_resources.py | 28 +++- .../hdmf_experimental/v0_4_0/namespace.py | 28 +++- .../v0_5_0/hdmf_experimental_experimental.py | 21 ++- .../v0_5_0/hdmf_experimental_resources.py | 21 ++- .../hdmf_experimental/v0_5_0/namespace.py | 21 ++- .../linkml/core/v2_3_0/core.nwb.base.yaml | 25 
+++- .../linkml/core/v2_3_0/core.nwb.behavior.yaml | 133 ++++++++++++------ .../linkml/core/v2_3_0/core.nwb.ecephys.yaml | 57 +++++--- .../linkml/core/v2_3_0/core.nwb.language.yaml | 1 + .../linkml/core/v2_3_0/core.nwb.ophys.yaml | 76 ++++++---- .../linkml/core/v2_4_0/core.nwb.base.yaml | 25 +++- .../linkml/core/v2_4_0/core.nwb.behavior.yaml | 133 ++++++++++++------ .../linkml/core/v2_4_0/core.nwb.ecephys.yaml | 57 +++++--- .../linkml/core/v2_4_0/core.nwb.language.yaml | 1 + .../linkml/core/v2_4_0/core.nwb.ophys.yaml | 76 ++++++---- .../linkml/core/v2_5_0/core.nwb.base.yaml | 25 +++- .../linkml/core/v2_5_0/core.nwb.behavior.yaml | 133 ++++++++++++------ .../linkml/core/v2_5_0/core.nwb.ecephys.yaml | 57 +++++--- .../linkml/core/v2_5_0/core.nwb.language.yaml | 1 + .../linkml/core/v2_5_0/core.nwb.ophys.yaml | 76 ++++++---- .../core/v2_6_0_alpha/core.nwb.base.yaml | 25 +++- .../core/v2_6_0_alpha/core.nwb.behavior.yaml | 133 ++++++++++++------ .../core/v2_6_0_alpha/core.nwb.ecephys.yaml | 57 +++++--- .../core/v2_6_0_alpha/core.nwb.language.yaml | 1 + .../core/v2_6_0_alpha/core.nwb.ophys.yaml | 76 ++++++---- .../linkml/core/v2_7_0/core.nwb.base.yaml | 25 +++- .../linkml/core/v2_7_0/core.nwb.behavior.yaml | 133 ++++++++++++------ .../linkml/core/v2_7_0/core.nwb.ecephys.yaml | 57 +++++--- .../linkml/core/v2_7_0/core.nwb.language.yaml | 1 + .../linkml/core/v2_7_0/core.nwb.ophys.yaml | 76 ++++++---- .../v1_4_0/hdmf-common.nwb.language.yaml | 1 + .../hdmf_common/v1_5_0/hdmf-common.base.yaml | 18 ++- .../v1_5_0/hdmf-common.nwb.language.yaml | 3 +- .../hdmf_common/v1_5_0/hdmf-common.table.yaml | 28 +++- .../v1_5_1/hdmf-common.nwb.language.yaml | 1 + .../v1_6_0/hdmf-common.nwb.language.yaml | 1 + .../v1_7_0/hdmf-common.nwb.language.yaml | 1 + .../hdmf_common/v1_8_0/hdmf-common.base.yaml | 18 ++- .../v1_8_0/hdmf-common.nwb.language.yaml | 3 +- .../hdmf_common/v1_8_0/hdmf-common.table.yaml | 28 +++- .../hdmf-experimental.experimental.yaml | 2 +- 
.../hdmf-experimental.nwb.language.yaml | 3 +- .../v0_1_0/hdmf-experimental.resources.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 1 + .../hdmf-experimental.nwb.language.yaml | 1 + .../hdmf-experimental.nwb.language.yaml | 1 + .../hdmf-experimental.nwb.language.yaml | 3 +- 235 files changed, 5356 insertions(+), 1385 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py index 2a6e84d..c081587 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -27,12 +27,7 @@ BASEMODEL_COERCE_VALUE = """ try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 """ BASEMODEL_CAST_WITH_VALUE = """ @@ -46,12 +41,7 @@ BASEMODEL_CAST_WITH_VALUE = """ try: return handler({"value": v}) except Exception: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 """ BASEMODEL_COERCE_CHILD = """ diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py index 3a7121f..0aca06c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) 
@@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 3cdb697..5136ecd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise 
e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py index 35e469e..c08c6ff 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + 
v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 50c35df..42d8653 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py index d9e8670..3eef400 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py index 574b467..613b948 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel): except 
(IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -84,7 +96,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py index c118384..be65250 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def 
coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py index 1f4d01f..8185f44 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, 
annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py index 97cc604..e7e1279 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 1fec83c..100fc9b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -49,7 +49,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 587c4a8..a8f98a2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -73,6 +73,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod 
+ def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -83,7 +95,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 66760ac..718a1c6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -52,7 +52,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -65,6 +65,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -75,7 +87,10 @@ class 
ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py index 39065e8..92e7d3e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py @@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -193,7 +205,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py index d509055..d92282c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 9520fc7..2c853af 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", 
mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py index c6c0821..b1b1b16 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) 
-> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index 59392d2..f74a7e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 6b07a73..7f4bb40 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py index bb40dc6..b203b9b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -84,7 +96,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py index e55e757..772841e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def 
coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py index ee1b247..5e824fa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + 
try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py index 4ccb41a..e212e58 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py index c26180e..81dcca7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py index 27ca8a7..e054ec2 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -73,6 +73,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -83,7 +95,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 8b286c0..0b7b720 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -52,7 +52,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the 
value field""" try: return handler(v) @@ -65,6 +65,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -75,7 +87,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py index 1000273..228bdac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py @@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: 
+ raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -193,7 +205,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py index 1608518..d2b5e5c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: 
+ v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 7a295d0..0935da2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py index 952fd14..4aa5ede 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index 678627d..4336705 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class 
ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 4398b2e..7731ed5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", 
mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py index a754163..f694d2c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -84,7 +96,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except 
TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 7e8e860..4858631 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py index c6a1bdf..56bc5e9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py @@ 
-43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py index 6ebbb31..59564fe 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", 
mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py index d8706fb..36196b3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class 
ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py index 9355878..f2b41df 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -73,6 +73,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -83,7 +95,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 4a6bba8..3f6e756 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py index 68aa1ca..b89e5a5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py @@ -173,7 +173,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", 
mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -186,6 +186,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -196,7 +208,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py index 79dbd8e..b163e24 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> 
Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py index d8c8111..5bcab66 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py index bd4959b..f370d51 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index d5ca311..f28980b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 7d0e888..cb37363 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def 
coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py index ffc9887..8172d22 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + 
try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -85,7 +97,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 944696c..4c5a2cf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py index ec713bb..bf08d2b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py index 6fb5183..f98882b 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 959ec53..38f8335 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return 
handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py index f8b39ac..1199e61 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1
+ @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 271df8c..0af340d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py index ba443d7..69651fd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py @@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -203,7 +215,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py index 14986e7..3afefe9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 686f581..f6ae2e5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class 
ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py index cc5f2a1..eec3289 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", 
mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 78d83f8..90b585e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = 
annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 69289cd..71eeb95 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py index 2a7b510..6648e37 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -85,7 +97,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 4d04eec..403fec4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class 
ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py index 22bc4dc..9c670dd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", 
mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py index 2730432..e73895a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except 
TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py index 7a31e4c..8c8b746 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py index f7e0674..a31241c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -66,7 +66,7 
@@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 5594f52..5d13706 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + 
@field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py index 4f78598..6be8c81 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py @@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -203,7 +215,10 @@ class 
ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py index 0450a89..5f82fcd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -67,7 +79,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -138,9 +153,9 @@ class 
Image(NWBData): description: Optional[str] = Field(None, description="""Description of the image.""") value: Optional[ Union[ - NDArray[Shape["* x, * y"], float], - NDArray[Shape["* x, * y, 3 r_g_b"], float], - NDArray[Shape["* x, * y, 4 r_g_b_a"], float], + NDArray[Shape["* x, * y"], float | int], + NDArray[Shape["* x, * y, 3 r_g_b"], float | int], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int], ] ] = Field(None) @@ -305,13 +320,16 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) + name: str = Field(...) + description: str = Field( + ..., description="""Description of this collection of processed data.""" + ) value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} }, ) - name: str = Field(...) class Images(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py index fbf64a4..afb8921 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + 
@field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -197,8 +212,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -212,10 +227,13 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEpochs", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}}, + ) value: Optional[Dict[str, IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) - name: str = Field(...) class BehavioralEvents(NWBDataInterface): @@ -227,10 +245,13 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEvents", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) 
class BehavioralTimeSeries(NWBDataInterface): @@ -242,10 +263,13 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralTimeSeries", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class PupilTracking(NWBDataInterface): @@ -257,10 +281,12 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}} + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class EyeTracking(NWBDataInterface): @@ -272,10 +298,12 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class CompassDirection(NWBDataInterface): @@ -287,10 +315,13 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "CompassDirection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}}, + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) 
class Position(NWBDataInterface): @@ -302,10 +333,12 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) # Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py index 08d5073..2fc40f2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a 
TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 6630b23..6af7ff2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -243,9 +258,9 @@ class ElectricalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_channels"], float | int], + NDArray[Shape["* num_times, * 
num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -351,8 +366,8 @@ class SpikeEventSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float | int], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -455,10 +470,12 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}} + ) value: Optional[Dict[str, SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) - name: str = Field(...) class FilteredEphys(NWBDataInterface): @@ -470,10 +487,12 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}} + ) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) class LFP(NWBDataInterface): @@ -485,10 +504,10 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}}) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) 
class ElectrodeGroup(NWBContainer): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 96ec1a4..0a22431 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py index 7cb5cb1..13d67cb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -62,7 +62,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -85,7 +97,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 6be9e7e..6094a09 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -63,7 +63,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -76,6 +76,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + 
@classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -86,7 +98,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -242,7 +257,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -349,7 +364,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units.
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -527,7 +542,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -704,7 +719,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -932,7 +947,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py index b080a46..4964397 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -67,7 +79,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -116,7 +131,7 @@ class 
GrayscaleImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}} @@ -138,7 +153,7 @@ class RGBImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -168,7 +183,7 @@ class RGBAImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -292,8 +307,8 @@ class ImageSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, * z"], float | int], ] ] = Field(None) @@ -518,8 +533,8 @@ class OpticalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py index 90ee48e..95576ab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to 
rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -234,8 +249,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -520,19 +535,21 @@ class DecompositionSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) - value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( - None, - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_times"}, - {"alias": "num_channels"}, - {"alias": "num_bands"}, - ] + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = ( + Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_times"}, + {"alias": "num_channels"}, + {"alias": "num_bands"}, + ] + } } - } - }, + }, + ) ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py index e55129b..e66477e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not 
type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -200,7 +215,7 @@ class OptogeneticSeriesData(ConfiguredBaseModel): description="""Unit of measurement for data, which is fixed to 'watts'.""", json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py index f6eb103..6813220 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -333,7 +348,8 @@ class RoiResponseSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) @@ -347,10 +363,10 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class Fluorescence(NWBDataInterface): @@ -362,10 +378,12 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}} + ) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class ImageSegmentation(NWBDataInterface): @@ -377,10 +395,13 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "ImageSegmentation", + json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}}, + ) value: Optional[Dict[str, PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) - name: str = Field(...) 
class PlaneSegmentation(DynamicTable): @@ -696,10 +717,13 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "MotionCorrection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}}, + ) value: Optional[Dict[str, CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) - name: str = Field(...) class CorrectedImageStack(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 38f2e67..67ebc9b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = 
annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py index a0842a1..c562bb8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py @@ -197,7 +197,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -210,6 +210,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -220,7 +232,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py index 0948248..a3f929e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py +++
b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -109,7 +124,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -333,9 +348,9 @@ class Image(NWBData): description: Optional[str] = Field(None, description="""Description of the image.""") value: Optional[ Union[ - NDArray[Shape["* x, * y"], float], - NDArray[Shape["* x, * y, 3 r_g_b"], float], - NDArray[Shape["* x, * y, 4 r_g_b_a"], float], + NDArray[Shape["* x, * y"], float | int], + NDArray[Shape["* x, * y, 3 r_g_b"], float | int], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int], ] ] = Field(None) @@ 
-500,13 +515,16 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) + name: str = Field(...) + description: str = Field( + ..., description="""Description of this collection of processed data.""" + ) value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} }, ) - name: str = Field(...) class Images(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 074546e..7a46192 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = 
annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -197,8 +212,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -212,10 +227,13 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEpochs", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}}, + ) value: Optional[Dict[str, IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) - name: str = Field(...) class BehavioralEvents(NWBDataInterface): @@ -227,10 +245,13 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEvents", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class BehavioralTimeSeries(NWBDataInterface): @@ -242,10 +263,13 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralTimeSeries", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) 
class PupilTracking(NWBDataInterface): @@ -257,10 +281,12 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}} + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class EyeTracking(NWBDataInterface): @@ -272,10 +298,12 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class CompassDirection(NWBDataInterface): @@ -287,10 +315,13 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "CompassDirection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}}, + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class Position(NWBDataInterface): @@ -302,10 +333,12 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py index c253e9e..9510135 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 64c0ab0..44503f1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -243,9 +258,9 @@ class ElectricalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_channels"], float | int], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -351,8 +366,8 @@ class SpikeEventSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float | int], + NDArray[Shape["* num_events, * 
num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -455,10 +470,12 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}} + ) value: Optional[Dict[str, SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) - name: str = Field(...) class FilteredEphys(NWBDataInterface): @@ -470,10 +487,12 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}} + ) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) class LFP(NWBDataInterface): @@ -485,10 +504,10 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}}) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) 
class ElectrodeGroup(NWBContainer): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 6904077..1c1b5ef 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py index c92a64b..26bd1fa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -70,7 +70,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -83,6 +83,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -93,7 +105,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 0fd2950..d6191fa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + 
@classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -245,7 +260,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -352,7 +367,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -530,7 +545,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -707,7 +722,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -935,7 +950,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -1204,6 +1219,10 @@ class IntracellularRecordingsTable(AlignedDynamicTable): stimuli: IntracellularStimuliTable = Field( ..., description="""Table for storing intracellular stimulus related metadata.""" ) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py index 10a1d92..1ed0f7f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the 
model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -67,7 +79,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -116,7 +131,7 @@ class GrayscaleImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}} @@ -138,7 +153,7 @@ class RGBImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -168,7 +183,7 @@ class RGBAImage(Image): ) name: str = Field(...) 
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -293,8 +308,8 @@ class ImageSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, * z"], float | int], ] ] = Field(None) @@ -520,8 +535,8 @@ class OpticalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py index 3ccb411..4f4825b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ 
class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -234,8 +249,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -520,19 +535,21 @@ class DecompositionSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) - value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( - None, - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_times"}, - {"alias": "num_channels"}, - {"alias": "num_bands"}, - ] + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = ( + Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_times"}, + {"alias": "num_channels"}, + {"alias": "num_bands"}, + ] + } } - } - }, + }, + ) ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py index bddf0ea..64478a5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -49,7 +49,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -200,7 +215,7 @@ class OptogeneticSeriesData(ConfiguredBaseModel): description="""Unit of measurement for data, which is fixed to 'watts'.""", json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 73f8fcd..6e69afa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -334,7 +349,8 @@ class RoiResponseSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) @@ -348,10 +364,10 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) 
class Fluorescence(NWBDataInterface): @@ -363,10 +379,12 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}} + ) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class ImageSegmentation(NWBDataInterface): @@ -378,10 +396,13 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "ImageSegmentation", + json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}}, + ) value: Optional[Dict[str, PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) - name: str = Field(...) class PlaneSegmentation(DynamicTable): @@ -697,10 +718,13 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "MotionCorrection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}}, + ) value: Optional[Dict[str, CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) - name: str = Field(...) 
class CorrectedImageStack(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index 60f8b4c..3024bed 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py index 775c660..2055051 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py @@ -210,7 +210,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -223,6 +223,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -233,7 +245,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py index 7dcc142..abb0545 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod 
+ def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -91,7 +103,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -120,7 +135,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -359,9 +374,9 @@ class Image(NWBData): description: Optional[str] = Field(None, description="""Description of the image.""") value: Optional[ Union[ - NDArray[Shape["* x, * y"], float], - NDArray[Shape["* x, * y, 3 r_g_b"], float], - NDArray[Shape["* x, * y, 4 r_g_b_a"], float], + NDArray[Shape["* x, * y"], float | int], + NDArray[Shape["* x, * y, 3 r_g_b"], float | int], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int], ] ] = Field(None) @@ -551,13 +566,16 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) + name: str = Field(...) + description: str = Field( + ..., description="""Description of this collection of processed data.""" + ) value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} }, ) - name: str = Field(...) 
class Images(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py index cfa3239..3df8abe 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -201,10 +216,10 @@ class SpatialSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, 1 x"], float], - NDArray[Shape["* num_times, 2 x_y"], float], - NDArray[Shape["* num_times, 3 x_y_z"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, 1 x"], float | int], + 
NDArray[Shape["* num_times, 2 x_y"], float | int], + NDArray[Shape["* num_times, 3 x_y_z"], float | int], ] ] = Field(None) @@ -218,10 +233,13 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEpochs", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}}, + ) value: Optional[Dict[str, IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) - name: str = Field(...) class BehavioralEvents(NWBDataInterface): @@ -233,10 +251,13 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEvents", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class BehavioralTimeSeries(NWBDataInterface): @@ -248,10 +269,13 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralTimeSeries", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class PupilTracking(NWBDataInterface): @@ -263,10 +287,12 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}} + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) 
class EyeTracking(NWBDataInterface): @@ -278,10 +304,12 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class CompassDirection(NWBDataInterface): @@ -293,10 +321,13 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "CompassDirection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}}, + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class Position(NWBDataInterface): @@ -308,10 +339,12 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py index b52ccd6..6ac30a8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 0fa24fc..eec63d7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -247,9 +262,9 @@ class ElectricalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_channels"], float | int], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -359,8 +374,8 @@ class SpikeEventSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float | int], + NDArray[Shape["* num_events, * 
num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -463,10 +478,12 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}} + ) value: Optional[Dict[str, SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) - name: str = Field(...) class FilteredEphys(NWBDataInterface): @@ -478,10 +495,12 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}} + ) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) class LFP(NWBDataInterface): @@ -493,10 +512,10 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}}) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) 
class ElectrodeGroup(NWBContainer): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 94de21c..39d2d4f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py index d0e41d7..338a1cb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -71,7 +71,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -84,6 +84,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -94,7 +106,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py index baaa066..5628183 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + 
@classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -249,7 +264,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -360,7 +375,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -542,7 +557,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -723,7 +738,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -955,7 +970,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -1225,6 +1240,10 @@ class IntracellularRecordingsTable(AlignedDynamicTable): stimuli: IntracellularStimuliTable = Field( ..., description="""Table for storing intracellular stimulus related metadata.""" ) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py index 4b9edee..4d62c19 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by 
casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -73,7 +85,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -122,7 +137,7 @@ class GrayscaleImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}} @@ -144,7 +159,7 @@ class RGBImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -174,7 +189,7 @@ class RGBAImage(Image): ) name: str = Field(...) 
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -303,8 +318,8 @@ class ImageSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, * z"], float | int], ] ] = Field(None) @@ -534,8 +549,8 @@ class OpticalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py index 5646cd8..13bfb53 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ 
class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -238,8 +253,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -536,19 +551,21 @@ class DecompositionSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) - value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( - None, - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_times"}, - {"alias": "num_channels"}, - {"alias": "num_bands"}, - ] + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = ( + Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_times"}, + {"alias": "num_channels"}, + {"alias": "num_bands"}, + ] + } } - } - }, + }, + ) ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 29938d6..724bd92 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -49,7 +49,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -204,7 +219,7 @@ class OptogeneticSeriesData(ConfiguredBaseModel): description="""Unit of measurement for data, which is fixed to 'watts'.""", json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py index ba37576..78e9c62 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -338,7 +353,8 @@ class RoiResponseSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) @@ -352,10 +368,10 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) 
class Fluorescence(NWBDataInterface): @@ -367,10 +383,12 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}} + ) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class ImageSegmentation(NWBDataInterface): @@ -382,10 +400,13 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "ImageSegmentation", + json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}}, + ) value: Optional[Dict[str, PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) - name: str = Field(...) class PlaneSegmentation(DynamicTable): @@ -701,10 +722,13 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "MotionCorrection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}}, + ) value: Optional[Dict[str, CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) - name: str = Field(...) 
class CorrectedImageStack(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 82f1e30..f886ffc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py index ff571fa..08d0b93 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py @@ -211,7 +211,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -224,6 +224,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -234,7 +246,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 1881de9..5a1224a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", 
mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -91,7 +103,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -120,7 +135,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -359,9 +374,9 @@ class Image(NWBData): description: Optional[str] = Field(None, description="""Description of the image.""") value: Optional[ Union[ - NDArray[Shape["* x, * y"], float], - NDArray[Shape["* x, * y, 3 r_g_b"], float], - NDArray[Shape["* x, * y, 4 r_g_b_a"], float], + NDArray[Shape["* x, * y"], float | int], + NDArray[Shape["* x, * y, 3 r_g_b"], float | int], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int], ] ] = Field(None) @@ -551,13 +566,16 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) + name: str = Field(...) + description: str = Field( + ..., description="""Description of this collection of processed data.""" + ) value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} }, ) - name: str = Field(...) 
class Images(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index 9a19768..2abebb7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -201,10 +216,10 @@ class SpatialSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, 1 x"], float], - NDArray[Shape["* num_times, 2 x_y"], float], - NDArray[Shape["* num_times, 3 x_y_z"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, 1 x"], 
float | int], + NDArray[Shape["* num_times, 2 x_y"], float | int], + NDArray[Shape["* num_times, 3 x_y_z"], float | int], ] ] = Field(None) @@ -218,10 +233,13 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEpochs", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}}, + ) value: Optional[Dict[str, IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) - name: str = Field(...) class BehavioralEvents(NWBDataInterface): @@ -233,10 +251,13 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEvents", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class BehavioralTimeSeries(NWBDataInterface): @@ -248,10 +269,13 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralTimeSeries", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class PupilTracking(NWBDataInterface): @@ -263,10 +287,12 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}} + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) 
class EyeTracking(NWBDataInterface): @@ -278,10 +304,12 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class CompassDirection(NWBDataInterface): @@ -293,10 +321,13 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "CompassDirection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}}, + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class Position(NWBDataInterface): @@ -308,10 +339,12 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index 2792e91..4aa7351 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 0e59233..d3bc1a8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ 
-59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -247,9 +262,9 @@ class ElectricalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_channels"], float | int], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -359,8 +374,8 @@ class SpikeEventSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float | int], + NDArray[Shape["* 
num_events, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -463,10 +478,12 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}} + ) value: Optional[Dict[str, SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) - name: str = Field(...) class FilteredEphys(NWBDataInterface): @@ -478,10 +495,12 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}} + ) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) class LFP(NWBDataInterface): @@ -493,10 +512,10 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}}) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) 
class ElectrodeGroup(NWBContainer): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index fca71ae..907f235 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index aaea52c..5ad7185 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py 
@@ -71,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -84,6 +84,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -94,7 +106,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 17e121f..84a24e2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): 
raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -249,7 +264,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -360,7 +375,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -542,7 +557,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -723,7 +738,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -955,7 +970,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -1225,6 +1240,10 @@ class IntracellularRecordingsTable(AlignedDynamicTable): stimuli: IntracellularStimuliTable = Field( ..., description="""Table for storing intracellular stimulus related metadata.""" ) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index e8d4430..733114f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to 
rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -73,7 +85,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -122,7 +137,7 @@ class GrayscaleImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}} @@ -144,7 +159,7 @@ class RGBImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -174,7 +189,7 @@ class RGBAImage(Image): ) name: str = Field(...) 
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -303,8 +318,8 @@ class ImageSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, * z"], float | int], ] ] = Field(None) @@ -534,8 +549,8 @@ class OpticalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 4278604..43160a9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> 
Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -238,8 +253,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -536,19 +551,21 @@ class DecompositionSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) - value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( - None, - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_times"}, - {"alias": "num_channels"}, - {"alias": "num_bands"}, - ] + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = ( + Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_times"}, + {"alias": "num_channels"}, + {"alias": "num_bands"}, + ] + } } - } - }, + }, + ) ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index d94f420..f3e6a15 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -204,7 +219,7 @@ class OptogeneticSeriesData(ConfiguredBaseModel): description="""Unit of measurement for data, which is fixed to 'watts'.""", json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index d9ba753..d736ef5 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -70,7 +70,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -83,6 +83,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -93,7 +105,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -440,7 +455,8 @@ class RoiResponseSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) @@ -454,10 +470,10 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, 
json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class Fluorescence(NWBDataInterface): @@ -469,10 +485,12 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}} + ) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class ImageSegmentation(NWBDataInterface): @@ -484,10 +502,13 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "ImageSegmentation", + json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}}, + ) value: Optional[Dict[str, PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) - name: str = Field(...) class PlaneSegmentation(DynamicTable): @@ -803,10 +824,13 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "MotionCorrection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}}, + ) value: Optional[Dict[str, CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) - name: str = Field(...) 
class CorrectedImageStack(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index f0ac913..334a02e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py index df0db47..dd19a19 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -213,7 +213,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -226,6 +226,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -236,7 +248,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py index 5a67fdf..6031ce0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -68,7 +68,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -81,6 +81,18 @@ class ConfiguredBaseModel(BaseModel): 
except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -91,7 +103,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -120,7 +135,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -359,9 +374,9 @@ class Image(NWBData): description: Optional[str] = Field(None, description="""Description of the image.""") value: Optional[ Union[ - NDArray[Shape["* x, * y"], float], - NDArray[Shape["* x, * y, 3 r_g_b"], float], - NDArray[Shape["* x, * y, 4 r_g_b_a"], float], + NDArray[Shape["* x, * y"], float | int], + NDArray[Shape["* x, * y, 3 r_g_b"], float | int], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int], ] ] = Field(None) @@ -551,13 +566,16 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) + name: str = Field(...) 
+ description: str = Field( + ..., description="""Description of this collection of processed data.""" + ) value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} }, ) - name: str = Field(...) class Images(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py index a67672a..d0d1919 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -201,10 +216,10 @@ class 
SpatialSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, 1 x"], float], - NDArray[Shape["* num_times, 2 x_y"], float], - NDArray[Shape["* num_times, 3 x_y_z"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, 1 x"], float | int], + NDArray[Shape["* num_times, 2 x_y"], float | int], + NDArray[Shape["* num_times, 3 x_y_z"], float | int], ] ] = Field(None) @@ -218,10 +233,13 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEpochs", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}}, + ) value: Optional[Dict[str, IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) - name: str = Field(...) class BehavioralEvents(NWBDataInterface): @@ -233,10 +251,13 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralEvents", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class BehavioralTimeSeries(NWBDataInterface): @@ -248,10 +269,13 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "BehavioralTimeSeries", + json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}}, + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) 
class PupilTracking(NWBDataInterface): @@ -263,10 +287,12 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}} + ) value: Optional[Dict[str, TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) - name: str = Field(...) class EyeTracking(NWBDataInterface): @@ -278,10 +304,12 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class CompassDirection(NWBDataInterface): @@ -293,10 +321,13 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "CompassDirection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}}, + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) class Position(NWBDataInterface): @@ -308,10 +339,12 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) + name: str = Field( + "Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}} + ) value: Optional[Dict[str, SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py index 8a24ab1..6d85cf6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -65,7 +77,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index 195633b..13b1cad 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -247,9 +262,9 @@ class ElectricalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_channels"], float], - NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_channels"], float | int], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -359,8 +374,8 @@ class SpikeEventSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_events, * num_samples"], float], - NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float | int], + NDArray[Shape["* num_events, * 
num_channels, * num_samples"], float | int], ] ] = Field(None) @@ -463,10 +478,12 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}} + ) value: Optional[Dict[str, SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) - name: str = Field(...) class FilteredEphys(NWBDataInterface): @@ -478,10 +495,12 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field( + "FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}} + ) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) class LFP(NWBDataInterface): @@ -493,10 +512,10 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) + name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}}) value: Optional[Dict[str, ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) - name: str = Field(...) 
class ElectrodeGroup(NWBContainer): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py index eb514b2..d114b30 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py index b21e028..bcaf029 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -71,7 +71,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -84,6 +84,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -94,7 +106,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 3cb28da..937b260 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + 
@classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -249,7 +264,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -360,7 +375,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units.
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -542,7 +557,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -723,7 +738,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} }, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -955,7 +970,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) - value: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float | int]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -1237,6 +1252,10 @@ class IntracellularRecordingsTable(AlignedDynamicTable): stimuli: IntracellularStimuliTable = Field( ..., description="""Table for storing intracellular stimulus related metadata.""" ) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py index 263bf5b..331bb3a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -63,6 +63,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by 
casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -73,7 +85,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -122,7 +137,7 @@ class GrayscaleImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}} @@ -144,7 +159,7 @@ class RGBImage(Image): ) name: str = Field(...) - value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -174,7 +189,7 @@ class RGBAImage(Image): ) name: str = Field(...)
- value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field( + value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -303,8 +318,8 @@ class ImageSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, * z"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, * z"], float | int], ] ] = Field(None) @@ -534,8 +549,8 @@ class OpticalSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], float], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], + NDArray[Shape["* frame, * x, * y"], float | int], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py index e1dbe0e..8c06050 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +72,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +94,10 @@ 
class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -238,8 +253,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], - NDArray[Shape["* num_times, * num_features"], float], + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_features"], float | int], ] ] = Field(None) @@ -536,19 +551,21 @@ class DecompositionSeriesData(ConfiguredBaseModel): description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) - value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( - None, - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_times"}, - {"alias": "num_channels"}, - {"alias": "num_bands"}, - ] + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = ( + Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_times"}, + {"alias": "num_channels"}, + {"alias": "num_bands"}, + ] + } } - } - }, + }, + ) ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py index bc21582..f2f31a2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -49,7 +49,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -72,7 +84,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -207,7 +222,8 @@ class OptogeneticSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 8104916..c842819 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, 
handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -79,6 +79,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -89,7 +101,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -436,7 +451,8 @@ class RoiResponseSeriesData(ConfiguredBaseModel): ) value: Optional[ Union[ - NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] + NDArray[Shape["* num_times"], float | int], + NDArray[Shape["* num_times, * num_rois"], float | int], ] ] = Field(None) @@ -450,10 +466,10 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field("DfOverF", json_schema_extra={"linkml_meta": {"ifabsent": "string(DfOverF)"}}) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) 
class Fluorescence(NWBDataInterface): @@ -465,10 +481,12 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "Fluorescence", json_schema_extra={"linkml_meta": {"ifabsent": "string(Fluorescence)"}} + ) value: Optional[Dict[str, RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) - name: str = Field(...) class ImageSegmentation(NWBDataInterface): @@ -480,10 +498,13 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "ImageSegmentation", + json_schema_extra={"linkml_meta": {"ifabsent": "string(ImageSegmentation)"}}, + ) value: Optional[Dict[str, PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) - name: str = Field(...) class PlaneSegmentation(DynamicTable): @@ -799,10 +820,13 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) + name: str = Field( + "MotionCorrection", + json_schema_extra={"linkml_meta": {"ifabsent": "string(MotionCorrection)"}}, + ) value: Optional[Dict[str, CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) - name: str = Field(...) 
class CorrectedImageStack(NWBDataInterface): diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index c7ced82..0cdfaac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py index fd6b259..95df714 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py @@ -214,7 +214,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -227,6 +227,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -237,7 +249,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py index e69de29..8b13789 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index 56af1b8..f4811e5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel): 
model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -64,7 +76,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index e52b294..0e5173a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -88,7 +100,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -117,7 +132,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -168,7 +183,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -260,7 +275,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions 
of dynamictables """ @@ -316,7 +331,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -571,13 +586,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -640,18 +661,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -887,7 +911,7 @@ class Index(Data): ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. 
If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on. """ @@ -900,7 +924,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): description: str = Field(..., description="""Description of what these vectors represent.""") -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. """ @@ -915,7 +939,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -933,7 +957,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. 
They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -963,7 +987,7 @@ class Container(ConfiguredBaseModel): name: str = Field(...) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. 
Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability. """ diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py index dcc5707..b494ff8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index 8ce7f43..96a432f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -64,7 +76,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 9065b81..f763497 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -88,7 +100,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -117,7 +132,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ 
-168,7 +183,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -260,7 +275,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -316,7 +331,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -571,13 +586,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -640,18 +661,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class AlignedDynamicTableMixin(BaseModel): 
+class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -887,7 +911,7 @@ class Index(Data): ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on. """ @@ -900,7 +924,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): description: str = Field(..., description="""Description of what these vectors represent.""") -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. """ @@ -915,7 +939,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -933,7 +957,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. 
The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -963,7 +987,7 @@ class Container(ConfiguredBaseModel): name: str = Field(...) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. 
Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability. """ diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py index 0f66985..a7b07c0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if 
v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index c0f7fcc..a702232 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,6 +54,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -64,7 +76,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, 
annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 749fab9..b047088 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,6 +78,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -88,7 +100,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -117,7 +132,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class 
VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -168,7 +183,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -260,7 +275,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -316,7 +331,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -571,13 +586,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -640,18 +661,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - 
description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -887,7 +911,7 @@ class Index(Data): ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on. """ @@ -901,7 +925,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. """ @@ -919,7 +943,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. 
""" @@ -937,7 +961,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -975,7 +999,7 @@ class Container(ConfiguredBaseModel): name: str = Field(...) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. 
DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability. """ diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py index c505d77..d0eb276 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py @@ -36,7 +36,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -70,6 +70,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: 
+ return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -80,7 +92,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py index 656629d..62ac415 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try 
to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py index 13824fe..2e171cb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -20,7 +20,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -52,7 +52,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except 
Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index fdd6bcc..135e3c6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + 
f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. 
If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. """ @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. 
They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -970,7 +991,7 @@ class VocabData(VectorData): ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py index 25e5651..1bdd7c4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -35,7 +35,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -67,7 +67,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py index affaa59..c166f2b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py index 01484f3..d68c966 100644 
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index cc9029d..1bcfbb6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class 
VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. 
""" @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" @@ -970,7 +991,7 @@ class VocabData(VectorData): ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py index 1338679..69a3e11 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -35,7 +35,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -67,7 +67,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py index a7ed66d..ac4722f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py index 4d4850c..d24f662 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py index 2620eb6..542738c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index a55c212..724cfba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class 
ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class 
VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. """ @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. 
An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -970,7 +991,7 @@ class VocabData(VectorData): ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. 
Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py index 040adf7..96107ed 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -37,7 +37,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -69,7 +69,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py index 02d67bf..80ff949 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py index ad70998..2633579 100644 
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index c704fa9..248a227 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class 
VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. 
""" @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" @@ -944,7 +965,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py index 0a85a76..5e9f469 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -61,7 +61,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 7c62f93..47eee69 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -63,7 +75,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -134,10 +149,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) 
value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) # Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index d434cd9..e805809 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value field""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type 
like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index c604b38..00fa3ed 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -90,7 +102,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -119,7 +134,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): 
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +185,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +277,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +333,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -573,13 +588,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -642,18 +663,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - 
description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +886,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. """ @@ -876,7 +900,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +924,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. 
""" @@ -918,7 +942,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -944,7 +968,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. 
This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. """ @@ -975,10 +999,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) + name: str = Field(...) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - name: str = Field(...) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py index 6e04fd0..6a2b5ed 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -74,7 +86,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py index df22948..adb45dd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py index 4e921cc..9731e34 100644 
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 6719d89..68e87e2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class 
VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. 
""" @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" @@ -944,7 +965,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py index fa4ea72..83568c9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,7 +62,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py index 57c9079..0ee3b97 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py index 73a6043..a94523d 100644 
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index d47f747..1fd0a39 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class 
VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. 
""" @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" @@ -944,7 +965,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py index 981e600..a8f55a0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,7 +62,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py index e785e04..08c9fda 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -51,7 +51,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py index b2fe190..a6cd2db 100644 
--- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index dceaa2e..a81bd63 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -78,7 +78,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod @@ -119,7 +141,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): +class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +192,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class 
VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +284,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +340,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -576,8 +598,7 @@ class DynamicTableMixin(BaseModel): title=f"field {key} cannot be cast to VectorData from {val}", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "model_type", "input": val, } ], @@ -653,7 +674,7 @@ class DynamicTableMixin(BaseModel): raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +883,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. 
""" @@ -876,7 +897,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +921,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. """ @@ -918,7 +939,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" @@ -944,7 +965,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. 
""" diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py index 4aaa46d..ed5ddbe 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -62,7 +62,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index 7731368..d58cc4b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -19,7 +19,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -53,6 +53,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -63,7 +75,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -134,10 +149,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) 
value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) # Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 7a3e72c..09428fa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type 
like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index d6699e6..39b7af0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -80,6 +80,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -90,7 +102,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -119,7 +134,7 @@ NUMPYDANTIC_VERSION = "1.2.1" T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel, Generic[T]): 
+class VectorDataMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -170,7 +185,7 @@ class VectorDataMixin(BaseModel, Generic[T]): return len(self.value) -class VectorIndexMixin(BaseModel, Generic[T]): +class VectorIndexMixin(ConfiguredBaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ @@ -262,7 +277,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): return len(self.value) -class DynamicTableRegionMixin(BaseModel): +class DynamicTableRegionMixin(ConfiguredBaseModel): """ Mixin to allow indexing references to regions of dynamictables """ @@ -318,7 +333,7 @@ class DynamicTableRegionMixin(BaseModel): ) # pragma: no cover -class DynamicTableMixin(BaseModel): +class DynamicTableMixin(ConfiguredBaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -573,13 +588,19 @@ class DynamicTableMixin(BaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "ValueError", - "loc": ("DynamicTableMixin", "cast_extra_columns"), + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -642,18 +663,21 @@ class DynamicTableMixin(BaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - 
description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: raise e from None -class AlignedDynamicTableMixin(BaseModel): +class AlignedDynamicTableMixin(ConfiguredBaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID @@ -862,7 +886,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(VectorDataMixin, ConfiguredBaseModel): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. """ @@ -876,7 +900,7 @@ class VectorData(VectorDataMixin, ConfiguredBaseModel): value: Optional[T] = Field(None) -class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -900,7 +924,7 @@ class VectorIndex(VectorIndexMixin, ConfiguredBaseModel): ] = Field(None) -class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): +class ElementIdentifiers(ElementIdentifiersMixin, Data): """ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. 
""" @@ -918,7 +942,7 @@ class ElementIdentifiers(ElementIdentifiersMixin, Data, ConfiguredBaseModel): ) -class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseModel): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. """ @@ -944,7 +968,7 @@ class DynamicTableRegion(DynamicTableRegionMixin, VectorData, ConfiguredBaseMode ] = Field(None) -class DynamicTable(DynamicTableMixin, ConfiguredBaseModel): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. 
This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. """ @@ -975,10 +999,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) + name: str = Field(...) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - name: str = Field(...) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py index dd09b7f..0e792f9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -64,6 +64,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -74,7 +86,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index ad617da..e265d0c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -11,7 +11,7 @@ import numpy as np from numpydantic import NDArray, Shape from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData metamodel_version = "None" @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -99,7 +114,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.experimental/", "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.experimental", } ) diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index cda720e..867c41e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -11,7 +11,7 @@ import numpy as np from numpydantic import NDArray, Shape from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data +from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data metamodel_version = "None" @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, 
info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass @@ -99,7 +114,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.resources/", "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.resources", } ) diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 3429a1e..d133165 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -10,9 +10,10 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data, SimpleMultiContainer -from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix -from ...hdmf_common.v1_4_0.hdmf_common_table import ( +from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data, SimpleMultiContainer +from ...hdmf_common.v1_5_0.hdmf_common_sparse 
import CSRMatrix +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + AlignedDynamicTable, DynamicTable, DynamicTableRegion, ElementIdentifiers, @@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -82,7 +95,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py index 1a88edc..c48bb0b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index 8d5af36..0ef8026 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py index c697f83..7be5bf0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -39,7 +39,7 @@ 
class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -71,7 +71,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py index cbd0ad9..894b2ab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): 
model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index 9f337fa..81be11e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( 
validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py index e1a12ca..fcc1422 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, 
use_enum_values=True, strict=False, @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -71,7 +71,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py index 0551cfd..6c9d22b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ 
-43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index 09e6f05..8f68ec9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -54,7 +54,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py index c904202..c79550d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) 
-> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -72,7 +72,29 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise e1 + raise ValueError( + f"coerce_value: Could not use the value field of {type(v)} " + f"to construct {cls.__name__}.{info.field_name}, " + f"expected type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 + + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception as e2: + raise ValueError( + f"cast_with_value: Could not cast {type(v)} as value field for " + f"{cls.__name__}.{info.field_name}," + f" expected_type: {cls.model_fields[info.field_name].annotation}\n" + f"inner error: {str(e1)}" + ) from e1 @field_validator("*", mode="before") @classmethod diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 714ae52..368d037 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: 
Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index d3132cd..8402336 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" 
try: return handler(v) @@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -66,7 +78,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py index 281e5b2..de810ad 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): model_config = ConfigDict( validate_assignment=True, validate_default=True, - extra="allow", + extra="forbid", arbitrary_types_allowed=True, use_enum_values=True, strict=False, @@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod - def coerce_value(cls, v: Any, handler) -> Any: + def coerce_value(cls, v: Any, handler, info) -> Any: """Try to rescue instantiation by using the value field""" try: return handler(v) @@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel): except (IndexError, KeyError, TypeError): raise e1 + @field_validator("*", 
mode="wrap") + @classmethod + def cast_with_value(cls, v: Any, handler, info) -> Any: + """Try to rescue instantiation by casting into the model's value fiel""" + try: + return handler(v) + except Exception as e1: + try: + return handler({"value": v}) + except Exception: + raise e1 + @field_validator("*", mode="before") @classmethod def coerce_subclass(cls, v: Any, info) -> Any: @@ -84,7 +96,10 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml index 04aec2d..1771272 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.base.yaml @@ -304,13 +304,24 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: NWBDataInterface - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of processed data. 
+ range: text + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable tree_root: true Images: name: Images diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml index 27f87f4..4b3440f 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml @@ -127,12 +127,19 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: IntervalSeries + name: + name: name + ifabsent: string(BehavioralEpochs) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries tree_root: true BehavioralEvents: name: BehavioralEvents @@ -140,12 +147,19 @@ classes: for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralEvents) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true BehavioralTimeSeries: name: BehavioralTimeSeries @@ -153,36 +167,57 @@ classes: of BehavioralEpochs for more details. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralTimeSeries) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true PupilTracking: name: PupilTracking description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(PupilTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true EyeTracking: name: EyeTracking description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(EyeTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true CompassDirection: name: CompassDirection @@ -193,22 +228,36 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(CompassDirection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true Position: name: Position description: Position data, whether along the x, x/y or x/y/z axis. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(Position) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml index 8ce3b23..9b4593c 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml @@ -367,12 +367,19 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpikeEventSeries + name: + name: name + ifabsent: string(EventWaveform) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries tree_root: true FilteredEphys: name: FilteredEphys @@ -389,12 +396,19 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(FilteredEphys) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true LFP: name: LFP @@ -403,12 +417,19 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(LFP) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true ElectrodeGroup: name: ElectrodeGroup diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml index e42c742..a921651 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml index 5041d82..aa4dcd3 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml @@ -169,12 +169,19 @@ classes: for image planes). is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(DfOverF) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true Fluorescence: name: Fluorescence @@ -183,12 +190,19 @@ classes: for ROIs and for image planes). 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(Fluorescence) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true ImageSegmentation: name: ImageSegmentation @@ -201,12 +215,19 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: PlaneSegmentation + name: + name: name + ifabsent: string(ImageSegmentation) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation tree_root: true PlaneSegmentation: name: PlaneSegmentation @@ -602,12 +623,19 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: CorrectedImageStack + name: + name: name + ifabsent: string(MotionCorrection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack tree_root: true CorrectedImageStack: name: CorrectedImageStack diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml index 68f456b..077ab82 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.base.yaml @@ -343,13 +343,24 @@ classes: description: A collection of processed data. 
is_a: NWBContainer attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: NWBDataInterface - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of processed data. + range: text + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable tree_root: true Images: name: Images diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml index 477072b..c16feb9 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml @@ -127,12 +127,19 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: IntervalSeries + name: + name: name + ifabsent: string(BehavioralEpochs) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries tree_root: true BehavioralEvents: name: BehavioralEvents @@ -140,12 +147,19 @@ classes: for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralEvents) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true BehavioralTimeSeries: name: BehavioralTimeSeries @@ -153,36 +167,57 @@ classes: of BehavioralEpochs for more details. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralTimeSeries) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true PupilTracking: name: PupilTracking description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(PupilTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true EyeTracking: name: EyeTracking description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(EyeTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true CompassDirection: name: CompassDirection @@ -193,22 +228,36 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(CompassDirection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true Position: name: Position description: Position data, whether along the x, x/y or x/y/z axis. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(Position) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml index 47187f0..e28b420 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml @@ -367,12 +367,19 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpikeEventSeries + name: + name: name + ifabsent: string(EventWaveform) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries tree_root: true FilteredEphys: name: FilteredEphys @@ -389,12 +396,19 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(FilteredEphys) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true LFP: name: LFP @@ -403,12 +417,19 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(LFP) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true ElectrodeGroup: name: ElectrodeGroup diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml index e42c742..a921651 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml index c87f8be..d63b6ba 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml @@ -169,12 +169,19 @@ classes: for image planes). is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(DfOverF) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true Fluorescence: name: Fluorescence @@ -183,12 +190,19 @@ classes: for ROIs and for image planes). 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(Fluorescence) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true ImageSegmentation: name: ImageSegmentation @@ -201,12 +215,19 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: PlaneSegmentation + name: + name: name + ifabsent: string(ImageSegmentation) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation tree_root: true PlaneSegmentation: name: PlaneSegmentation @@ -602,12 +623,19 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: CorrectedImageStack + name: + name: name + ifabsent: string(MotionCorrection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack tree_root: true CorrectedImageStack: name: CorrectedImageStack diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml index 5809bb5..a92af97 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.base.yaml @@ -375,13 +375,24 @@ classes: description: A collection of processed data. 
is_a: NWBContainer attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: NWBDataInterface - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of processed data. + range: text + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable tree_root: true Images: name: Images diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml index 9db47e3..cdb2a98 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml @@ -148,12 +148,19 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: IntervalSeries + name: + name: name + ifabsent: string(BehavioralEpochs) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries tree_root: true BehavioralEvents: name: BehavioralEvents @@ -161,12 +168,19 @@ classes: for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralEvents) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true BehavioralTimeSeries: name: BehavioralTimeSeries @@ -174,36 +188,57 @@ classes: of BehavioralEpochs for more details. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralTimeSeries) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true PupilTracking: name: PupilTracking description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(PupilTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true EyeTracking: name: EyeTracking description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(EyeTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true CompassDirection: name: CompassDirection @@ -214,22 +249,36 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(CompassDirection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true Position: name: Position description: Position data, whether along the x, x/y or x/y/z axis. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(Position) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml index 6700cc0..8439a6a 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml @@ -385,12 +385,19 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpikeEventSeries + name: + name: name + ifabsent: string(EventWaveform) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries tree_root: true FilteredEphys: name: FilteredEphys @@ -407,12 +414,19 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(FilteredEphys) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true LFP: name: LFP @@ -421,12 +435,19 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(LFP) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true ElectrodeGroup: name: ElectrodeGroup diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml index e42c742..a921651 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml index 3658597..2147878 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml @@ -178,12 +178,19 @@ classes: for image planes). is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(DfOverF) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true Fluorescence: name: Fluorescence @@ -192,12 +199,19 @@ classes: for ROIs and for image planes). 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(Fluorescence) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true ImageSegmentation: name: ImageSegmentation @@ -210,12 +224,19 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: PlaneSegmentation + name: + name: name + ifabsent: string(ImageSegmentation) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation tree_root: true PlaneSegmentation: name: PlaneSegmentation @@ -611,12 +632,19 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: CorrectedImageStack + name: + name: name + ifabsent: string(MotionCorrection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack tree_root: true CorrectedImageStack: name: CorrectedImageStack diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml index f7f96c9..13fe72d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml @@ -375,13 +375,24 @@ classes: description: A collection of processed data. 
is_a: NWBContainer attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: NWBDataInterface - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of processed data. + range: text + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable tree_root: true Images: name: Images diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml index 47c9e78..123714b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml @@ -148,12 +148,19 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: IntervalSeries + name: + name: name + ifabsent: string(BehavioralEpochs) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries tree_root: true BehavioralEvents: name: BehavioralEvents @@ -161,12 +168,19 @@ classes: for more details. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralEvents) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true BehavioralTimeSeries: name: BehavioralTimeSeries @@ -174,36 +188,57 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralTimeSeries) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true PupilTracking: name: PupilTracking description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(PupilTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true EyeTracking: name: EyeTracking description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(EyeTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true CompassDirection: name: CompassDirection @@ -214,22 +249,36 @@ classes: be radians or degrees. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(CompassDirection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true Position: name: Position description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(Position) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml index 2434030..0deca51 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml @@ -385,12 +385,19 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpikeEventSeries + name: + name: name + ifabsent: string(EventWaveform) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries tree_root: true FilteredEphys: name: FilteredEphys @@ -407,12 +414,19 @@ classes: the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(FilteredEphys) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true LFP: name: LFP @@ -421,12 +435,19 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(LFP) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true ElectrodeGroup: name: ElectrodeGroup diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml index e42c742..a921651 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml index 9226935..d608587 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml @@ -232,12 +232,19 @@ classes: for image planes). 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(DfOverF) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true Fluorescence: name: Fluorescence @@ -246,12 +253,19 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(Fluorescence) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true ImageSegmentation: name: ImageSegmentation @@ -264,12 +278,19 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: PlaneSegmentation + name: + name: name + ifabsent: string(ImageSegmentation) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation tree_root: true PlaneSegmentation: name: PlaneSegmentation @@ -665,12 +686,19 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: CorrectedImageStack + name: + name: name + ifabsent: string(MotionCorrection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack tree_root: true CorrectedImageStack: name: CorrectedImageStack diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml index ab0ec35..e158341 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -375,13 +375,24 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: NWBDataInterface - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of processed data. + range: text + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable tree_root: true Images: name: Images diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml index 5f2dc7d..6a4ec81 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml @@ -148,12 +148,19 @@ classes: events. BehavioralTimeSeries is for continuous data. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: IntervalSeries + name: + name: name + ifabsent: string(BehavioralEpochs) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries tree_root: true BehavioralEvents: name: BehavioralEvents @@ -161,12 +168,19 @@ classes: for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralEvents) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true BehavioralTimeSeries: name: BehavioralTimeSeries @@ -174,36 +188,57 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(BehavioralTimeSeries) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true PupilTracking: name: PupilTracking description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeSeries + name: + name: name + ifabsent: string(PupilTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries tree_root: true EyeTracking: name: EyeTracking description: Eye-tracking data, representing direction of gaze. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(EyeTracking) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true CompassDirection: name: CompassDirection @@ -214,22 +249,36 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(CompassDirection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true Position: name: Position description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpatialSeries + name: + name: name + ifabsent: string(Position) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml index db3213f..42927b6 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -385,12 +385,19 @@ classes: during experiment acquisition. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: SpikeEventSeries + name: + name: name + ifabsent: string(EventWaveform) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries tree_root: true FilteredEphys: name: FilteredEphys @@ -407,12 +414,19 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(FilteredEphys) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true LFP: name: LFP @@ -421,12 +435,19 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: ElectricalSeries + name: + name: name + ifabsent: string(LFP) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries tree_root: true ElectrodeGroup: name: ElectrodeGroup diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml index e42c742..a921651 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml index 61cb747..293371a 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -232,12 +232,19 @@ classes: for image planes). is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(DfOverF) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true Fluorescence: name: Fluorescence @@ -246,12 +253,19 @@ classes: for ROIs and for image planes). 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: RoiResponseSeries + name: + name: name + ifabsent: string(Fluorescence) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries tree_root: true ImageSegmentation: name: ImageSegmentation @@ -264,12 +278,19 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: PlaneSegmentation + name: + name: name + ifabsent: string(ImageSegmentation) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation tree_root: true PlaneSegmentation: name: PlaneSegmentation @@ -665,12 +686,19 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' 
is_a: NWBDataInterface attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: CorrectedImageStack + name: + name: name + ifabsent: string(MotionCorrection) + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack tree_root: true CorrectedImageStack: name: CorrectedImageStack diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml index e3d3df3..9b6bc55 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml index 91de7c2..7493ece 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. 
is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml index e3d3df3..1842589 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml index b8e1134..a29024b 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml @@ -189,10 +189,26 @@ classes: by a separate DynamicTable stored within the group. is_a: DynamicTable attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. 
+ This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. + range: text + required: true + multivalued: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml index e3d3df3..9b6bc55 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml index e3d3df3..9b6bc55 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml index e3d3df3..9b6bc55 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git 
a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml index ea83af3..3a89816 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml index e3d3df3..1842589 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml index c7f3d0d..8b73408 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml @@ -189,10 +189,26 @@ classes: by a separate 
DynamicTable stored within the group. is_a: DynamicTable attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. + This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. + range: text + required: true + multivalued: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index 2a10ba2..b0b87d5 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.experimental version: 0.1.0 imports: -- ../../hdmf_common/v1_4_0/namespace +- ../../hdmf_common/v1_5_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.experimental/ classes: diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml index 0a824ca..650c484 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 
'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index a8d955d..9aeb7d0 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.resources version: 0.1.0 imports: -- ../../hdmf_common/v1_4_0/namespace +- ../../hdmf_common/v1_5_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.resources/ classes: diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml index 0a824ca..b487163 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml index 0a824ca..b487163 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml index 0a824ca..b487163 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml index 0a824ca..650c484 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -66,6 +66,7 @@ types: numeric: name: numeric typeof: float + repr: float | int text: name: text typeof: string From f9f1d49fcaa3a0d09f831dbffe89fc84a8b66b98 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:02:16 -0700 Subject: [PATCH 12/18] working complete, strict validating io :) --- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 37 ++++++++ nwb_linkml/src/nwb_linkml/adapters/group.py | 88 ++++++++++++++++--- .../src/nwb_linkml/generators/pydantic.py | 11 ++- 
nwb_linkml/src/nwb_linkml/includes/base.py | 22 ++++- nwb_linkml/src/nwb_linkml/io/hdf5.py | 40 +++++---- nwb_linkml/src/nwb_linkml/lang_elements.py | 4 + nwb_linkml/tests/test_io/test_io_nwb.py | 2 +- scripts/generate_core.py | 50 +++-------- 8 files changed, 180 insertions(+), 74 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index 1ceb7b5..07c5231 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -354,3 +354,40 @@ def defaults(cls: Dataset | Attribute) -> dict: ret["ifabsent"] = cls.default_value return ret + + +def is_container(group: Group) -> bool: + """ + Check if a group is a container group. + + i.e. a group that... + * has no name + * multivalued quantity + * has a ``neurodata_type_inc`` + * has no ``neurodata_type_def`` + * has no sub-groups + * has no datasets + * has no attributes + + Examples: + + .. code-block:: yaml + + - name: templates + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries objects containing template data of presented stimuli. + quantity: '*' + - neurodata_type_inc: Images + doc: Images objects containing images of presented stimuli. 
+ quantity: '*' + """ + return ( + not group.name + and group.quantity == "*" + and group.neurodata_type_inc + and not group.neurodata_type_def + and not group.datasets + and not group.groups + and not group.attributes + ) diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py index fb919d0..f9ef07d 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/group.py +++ b/nwb_linkml/src/nwb_linkml/adapters/group.py @@ -2,11 +2,11 @@ Adapter for NWB groups to linkml Classes """ -from typing import List, Type +from typing import Type from linkml_runtime.linkml_model import SlotDefinition -from nwb_linkml.adapters.adapter import BuildResult +from nwb_linkml.adapters.adapter import BuildResult, is_container from nwb_linkml.adapters.classes import ClassAdapter from nwb_linkml.adapters.dataset import DatasetAdapter from nwb_linkml.maps import QUANTITY_MAP @@ -45,19 +45,21 @@ class GroupAdapter(ClassAdapter): ): return self.handle_container_slot(self.cls) - nested_res = self.build_subclasses() - # add links - links = self.build_links() + nested_res = self.build_datasets() + nested_res += self.build_groups() + nested_res += self.build_links() + nested_res += self.build_containers() + nested_res += self.build_special_cases() # we don't propagate slots up to the next level since they are meant for this # level (ie. a way to refer to our children) - res = self.build_base(extra_attrs=nested_res.slots + links) + res = self.build_base(extra_attrs=nested_res.slots) # we do propagate classes tho res.classes.extend(nested_res.classes) return res - def build_links(self) -> List[SlotDefinition]: + def build_links(self) -> BuildResult: """ Build links specified in the ``links`` field as slots that refer to other classes, with an additional annotation specifying that they are in fact links. @@ -66,7 +68,7 @@ class GroupAdapter(ClassAdapter): file hierarchy as a string. 
""" if not self.cls.links: - return [] + return BuildResult() annotations = [{"tag": "source_type", "value": "link"}] @@ -83,7 +85,7 @@ class GroupAdapter(ClassAdapter): ) for link in self.cls.links ] - return slots + return BuildResult(slots=slots) def handle_container_group(self, cls: Group) -> BuildResult: """ @@ -129,7 +131,7 @@ class GroupAdapter(ClassAdapter): # We are a top-level container class like ProcessingModule base = self.build_base() # remove all the attributes and replace with child slot - base.classes[0].attributes.append(slot) + base.classes[0].attributes.update({slot.name: slot}) return base def handle_container_slot(self, cls: Group) -> BuildResult: @@ -167,30 +169,88 @@ class GroupAdapter(ClassAdapter): return BuildResult(slots=[slot]) - def build_subclasses(self) -> BuildResult: + def build_datasets(self) -> BuildResult: """ Build nested groups and datasets Create ClassDefinitions for each, but then also create SlotDefinitions that will be used as attributes linking the main class to the subclasses + + Datasets are simple, they are terminal classes, and all logic + for creating slots vs. classes is handled by the adapter class """ - # Datasets are simple, they are terminal classes, and all logic - # for creating slots vs. 
classes is handled by the adapter class dataset_res = BuildResult() if self.cls.datasets: for dset in self.cls.datasets: dset_adapter = DatasetAdapter(cls=dset, parent=self) dataset_res += dset_adapter.build() + return dataset_res + + def build_groups(self) -> BuildResult: + """ + Build subgroups, excluding pure container subgroups + """ group_res = BuildResult() if self.cls.groups: for group in self.cls.groups: + if is_container(group): + continue group_adapter = GroupAdapter(cls=group, parent=self) group_res += group_adapter.build() - res = dataset_res + group_res + return group_res + def build_containers(self) -> BuildResult: + """ + Build all container types into a single ``value`` slot + """ + res = BuildResult() + if not self.cls.groups: + return res + containers = [grp for grp in self.cls.groups if is_container(grp)] + if not containers: + return res + + if len(containers) == 1: + range = {"range": containers[0].neurodata_type_inc} + description = containers[0].doc + else: + range = {"any_of": [{"range": subcls.neurodata_type_inc} for subcls in containers]} + description = "\n\n".join([grp.doc for grp in containers]) + + slot = SlotDefinition( + name="value", + multivalued=True, + inlined=True, + inlined_as_list=False, + description=description, + **range, + ) + + if self.debug: # pragma: no cover - only used in development + slot.annotations["group_adapter"] = { + "tag": "slot_adapter", + "value": "container_value_slot", + } + res.slots = [slot] + return res + + def build_special_cases(self) -> BuildResult: + """ + Special cases, at this point just for NWBFile, which has + extra ``.specloc`` and ``specifications`` attrs + """ + res = BuildResult() + if self.cls.neurodata_type_def == "NWBFile": + res.slots = [ + SlotDefinition( + name="specifications", + range="dict", + description="Nested dictionary of schema specifications", + ), + ] return res def build_self_slot(self) -> SlotDefinition: diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py 
b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 336bbf8..4b3d412 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -15,7 +15,7 @@ from linkml.generators import PydanticGenerator from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray from linkml.generators.pydanticgen.build import ClassResult, SlotResult from linkml.generators.pydanticgen.pydanticgen import SplitMode -from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule +from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule, ObjectImport from linkml_runtime.linkml_model.meta import ( ArrayExpression, SchemaDefinition, @@ -30,6 +30,7 @@ from nwb_linkml.includes.base import ( BASEMODEL_COERCE_CHILD, BASEMODEL_COERCE_VALUE, BASEMODEL_GETITEM, + BASEMODEL_EXTRA_TO_VALUE, ) from nwb_linkml.includes.hdmf import ( DYNAMIC_TABLE_IMPORTS, @@ -58,9 +59,15 @@ class NWBPydanticGenerator(PydanticGenerator): BASEMODEL_COERCE_VALUE, BASEMODEL_CAST_WITH_VALUE, BASEMODEL_COERCE_CHILD, + BASEMODEL_EXTRA_TO_VALUE, ) split: bool = True - imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")]) + imports: list[Import] = field( + default_factory=lambda: [ + Import(module="numpy", alias="np"), + Import(module="pydantic", objects=[ObjectImport(name="model_validator")]), + ] + ) schema_map: Optional[Dict[str, SchemaDefinition]] = None """See :meth:`.LinkMLProvider.build` for usage - a list of specific versions to import from""" diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py index c081587..6cad4a3 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -3,7 +3,7 @@ Modifications to the ConfiguredBaseModel used by all generated classes """ BASEMODEL_GETITEM = """ - def __getitem__(self, val: Union[int, slice]) -> Any: + def 
__getitem__(self, val: Union[int, slice, str]) -> Any: \"\"\"Try and get a value from value or "data" if we have it\"\"\" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -64,3 +64,23 @@ BASEMODEL_COERCE_CHILD = """ pass return v """ + +BASEMODEL_EXTRA_TO_VALUE = """ + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + \"\"\" + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + \"\"\" + if cls.model_config["extra"] == "forbid" and "value" in cls.model_fields and isinstance(v, dict): + extras = {key:val for key,val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v +""" diff --git a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py index 1691a46..d46465f 100644 --- a/nwb_linkml/src/nwb_linkml/io/hdf5.py +++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py @@ -35,7 +35,7 @@ import h5py import networkx as nx import numpy as np from numpydantic.interface.hdf5 import H5ArrayPath -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel from tqdm import tqdm from nwb_linkml.maps.hdf5 import ( @@ -166,24 +166,28 @@ def _load_node( raise TypeError(f"Nodes can only be h5py Datasets and Groups, got {obj}") if "neurodata_type" in obj.attrs: + # SPECIAL CASE: ignore `.specloc` + if ".specloc" in args: + del args[".specloc"] + model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"]) - try: - return model(**args) - except ValidationError as e1: - # try to restack extra fields into ``value`` - if "value" in model.model_fields: - value_dict = { - key: val for key, val in args.items() if key not in model.model_fields - } - for k in value_dict: - del args[k] - args["value"] = value_dict - try: - return model(**args) - except Exception as e2: - raise e2 from 
e1 - else: - raise e1 + # try: + return model(**args) + # except ValidationError as e1: + # # try to restack extra fields into ``value`` + # if "value" in model.model_fields: + # value_dict = { + # key: val for key, val in args.items() if key not in model.model_fields + # } + # for k in value_dict: + # del args[k] + # args["value"] = value_dict + # try: + # return model(**args) + # except Exception as e2: + # raise e2 from e1 + # else: + # raise e1 else: if "name" in args: diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py index fdde634..476e6e2 100644 --- a/nwb_linkml/src/nwb_linkml/lang_elements.py +++ b/nwb_linkml/src/nwb_linkml/lang_elements.py @@ -39,6 +39,10 @@ def _make_dtypes() -> List[TypeDefinition]: repr=linkml_reprs.get(nwbtype, None), ) DTypeTypes.append(atype) + + # a dict type! + DTypeTypes.append(TypeDefinition(name="dict", repr="dict")) + return DTypeTypes diff --git a/nwb_linkml/tests/test_io/test_io_nwb.py b/nwb_linkml/tests/test_io/test_io_nwb.py index 1ad51ed..32a50d1 100644 --- a/nwb_linkml/tests/test_io/test_io_nwb.py +++ b/nwb_linkml/tests/test_io/test_io_nwb.py @@ -80,7 +80,7 @@ def test_position(read_nwbfile, read_pynwb): py_trials = read_pynwb.trials.to_dataframe() pd.testing.assert_frame_equal(py_trials, trials) - spatial = read_nwbfile.processing["behavior"].Position.SpatialSeries + spatial = read_nwbfile.processing["behavior"]["Position"]["SpatialSeries"] py_spatial = read_pynwb.processing["behavior"]["Position"]["SpatialSeries"] _compare_attrs(spatial, py_spatial) assert np.array_equal(spatial[:], py_spatial.data[:]) diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 55fc94e..413b85b 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -19,37 +19,6 @@ from nwb_linkml.providers import LinkMLProvider, PydanticProvider from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo from nwb_linkml.io import schema as io - -def 
generate_core_yaml(output_path: Path, dry_run: bool = False, hdmf_only: bool = False): - """Just build the latest version of the core schema""" - - core = io.load_nwb_core(hdmf_only=hdmf_only) - built_schemas = core.build().schemas - for schema in built_schemas: - output_file = output_path / (schema.name + ".yaml") - if not dry_run: - yaml_dumper.dump(schema, output_file) - - -def generate_core_pydantic(yaml_path: Path, output_path: Path, dry_run: bool = False): - """Just generate the latest version of the core schema""" - for schema in yaml_path.glob("*.yaml"): - python_name = schema.stem.replace(".", "_").replace("-", "_") - pydantic_file = (output_path / python_name).with_suffix(".py") - - generator = NWBPydanticGenerator( - str(schema), - pydantic_version="2", - emit_metadata=True, - gen_classvars=True, - gen_slots=True, - ) - gen_pydantic = generator.serialize() - if not dry_run: - with open(pydantic_file, "w") as pfile: - pfile.write(gen_pydantic) - - def make_tmp_dir(clear: bool = False) -> Path: # use a directory underneath this one as the temporary directory rather than # the default hidden one @@ -68,6 +37,7 @@ def generate_versions( dry_run: bool = False, repo: GitRepo = NWB_CORE_REPO, pdb=False, + latest: bool = False, ): """ Generate linkml models for all versions @@ -82,8 +52,13 @@ def generate_versions( failed_versions = {} + if latest: + versions = [repo.namespace.versions[-1]] + else: + versions = repo.namespace.versions + overall_progress = Progress() - overall_task = overall_progress.add_task("All Versions", total=len(NWB_CORE_REPO.versions)) + overall_task = overall_progress.add_task("All Versions", total=len(versions)) build_progress = Progress( TextColumn( @@ -100,7 +75,7 @@ def generate_versions( linkml_task = None pydantic_task = None - for version in repo.namespace.versions: + for version in versions: # build linkml try: # check out the version (this should also refresh the hdmf-common schema) @@ -251,11 +226,10 @@ def main(): if not 
args.dry_run: args.yaml.mkdir(exist_ok=True) args.pydantic.mkdir(exist_ok=True) - if args.latest: - generate_core_yaml(args.yaml, args.dry_run) - generate_core_pydantic(args.yaml, args.pydantic, args.dry_run) - else: - generate_versions(args.yaml, args.pydantic, args.dry_run, repo, pdb=args.pdb) + + generate_versions( + args.yaml, args.pydantic, args.dry_run, repo, pdb=args.pdb, latest=args.latest + ) if __name__ == "__main__": From 7a0da1528c573dfcf3462e2e29dc2ff978df3e6c Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:27:36 -0700 Subject: [PATCH 13/18] regenerate models --- .../pydantic/core/v2_2_0/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_2_0/core_nwb_file.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_2_0/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_2_0/core_nwb_ophys.py | 25 ++++++- .../core/v2_2_0/core_nwb_retinotopy.py | 25 ++++++- .../models/pydantic/core/v2_2_0/namespace.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_2_1/core_nwb_file.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_2_1/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 25 ++++++- .../core/v2_2_1/core_nwb_retinotopy.py | 
25 ++++++- .../models/pydantic/core/v2_2_1/namespace.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_2_2/core_nwb_file.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_2_2/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_2_2/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 25 ++++++- .../core/v2_2_2/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_2_2/namespace.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_2_4/core_nwb_file.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_2_4/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 25 ++++++- .../core/v2_2_4/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_2_4/namespace.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_2_5/core_nwb_file.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_2_5/core_nwb_image.py | 26 ++++++- 
.../pydantic/core/v2_2_5/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 25 ++++++- .../core/v2_2_5/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_2_5/namespace.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_base.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_3_0/core_nwb_file.py | 51 ++++++++++---- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_3_0/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_3_0/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 25 ++++++- .../core/v2_3_0/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_3_0/namespace.py | 26 ++++++- .../pydantic/core/v2_4_0/core_nwb_base.py | 24 ++++++- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_4_0/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_4_0/core_nwb_file.py | 51 ++++++++++---- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_4_0/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_4_0/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 25 ++++++- .../core/v2_4_0/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_4_0/namespace.py | 26 ++++++- .../pydantic/core/v2_5_0/core_nwb_base.py | 24 ++++++- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_5_0/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 25 ++++++- 
.../pydantic/core/v2_5_0/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_5_0/core_nwb_file.py | 51 ++++++++++---- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_5_0/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_5_0/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 25 ++++++- .../core/v2_5_0/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_5_0/namespace.py | 26 ++++++- .../core/v2_6_0_alpha/core_nwb_base.py | 24 ++++++- .../core/v2_6_0_alpha/core_nwb_behavior.py | 26 ++++++- .../core/v2_6_0_alpha/core_nwb_device.py | 26 ++++++- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 25 ++++++- .../core/v2_6_0_alpha/core_nwb_epoch.py | 25 ++++++- .../core/v2_6_0_alpha/core_nwb_file.py | 51 ++++++++++---- .../core/v2_6_0_alpha/core_nwb_icephys.py | 25 ++++++- .../core/v2_6_0_alpha/core_nwb_image.py | 26 ++++++- .../core/v2_6_0_alpha/core_nwb_misc.py | 25 ++++++- .../core/v2_6_0_alpha/core_nwb_ogen.py | 26 ++++++- .../core/v2_6_0_alpha/core_nwb_ophys.py | 25 ++++++- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 26 ++++++- .../pydantic/core/v2_6_0_alpha/namespace.py | 26 ++++++- .../pydantic/core/v2_7_0/core_nwb_base.py | 24 ++++++- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 26 ++++++- .../pydantic/core/v2_7_0/core_nwb_device.py | 26 ++++++- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 25 ++++++- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 25 ++++++- .../pydantic/core/v2_7_0/core_nwb_file.py | 51 ++++++++++---- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 25 ++++++- .../pydantic/core/v2_7_0/core_nwb_image.py | 26 ++++++- .../pydantic/core/v2_7_0/core_nwb_misc.py | 25 ++++++- .../pydantic/core/v2_7_0/core_nwb_ogen.py | 26 ++++++- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 25 ++++++- .../core/v2_7_0/core_nwb_retinotopy.py | 26 ++++++- .../models/pydantic/core/v2_7_0/namespace.py | 26 ++++++- .../models/pydantic/hdmf_common/__init__.py | 
1 - .../hdmf_common/v1_1_0/hdmf_common_sparse.py | 26 ++++++- .../hdmf_common/v1_1_0/hdmf_common_table.py | 24 ++++++- .../pydantic/hdmf_common/v1_1_0/namespace.py | 26 ++++++- .../hdmf_common/v1_1_2/hdmf_common_sparse.py | 26 ++++++- .../hdmf_common/v1_1_2/hdmf_common_table.py | 24 ++++++- .../pydantic/hdmf_common/v1_1_2/namespace.py | 26 ++++++- .../hdmf_common/v1_1_3/hdmf_common_sparse.py | 26 ++++++- .../hdmf_common/v1_1_3/hdmf_common_table.py | 24 ++++++- .../pydantic/hdmf_common/v1_1_3/namespace.py | 26 ++++++- .../hdmf_common/v1_2_0/hdmf_common_base.py | 47 ++++++++----- .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_2_0/hdmf_common_table.py | 63 ++++++++++++----- .../pydantic/hdmf_common/v1_2_0/namespace.py | 47 ++++++++----- .../hdmf_common/v1_2_1/hdmf_common_base.py | 47 ++++++++----- .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_2_1/hdmf_common_table.py | 63 ++++++++++++----- .../pydantic/hdmf_common/v1_2_1/namespace.py | 47 ++++++++----- .../hdmf_common/v1_3_0/hdmf_common_base.py | 47 ++++++++----- .../v1_3_0/hdmf_common_resources.py | 47 ++++++++----- .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_3_0/hdmf_common_table.py | 63 ++++++++++++----- .../pydantic/hdmf_common/v1_3_0/namespace.py | 47 ++++++++----- .../hdmf_common/v1_4_0/hdmf_common_base.py | 49 ++++++++----- .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_4_0/hdmf_common_table.py | 63 ++++++++++++----- .../pydantic/hdmf_common/v1_4_0/namespace.py | 47 ++++++++----- .../hdmf_common/v1_5_0/hdmf_common_base.py | 26 ++++++- .../hdmf_common/v1_5_0/hdmf_common_sparse.py | 26 ++++++- .../hdmf_common/v1_5_0/hdmf_common_table.py | 24 ++++++- .../pydantic/hdmf_common/v1_5_0/namespace.py | 26 ++++++- .../hdmf_common/v1_5_1/hdmf_common_base.py | 49 ++++++++----- .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 47 ++++++++----- 
.../hdmf_common/v1_5_1/hdmf_common_table.py | 69 +++++++++++++------ .../pydantic/hdmf_common/v1_5_1/namespace.py | 47 ++++++++----- .../hdmf_common/v1_6_0/hdmf_common_base.py | 49 ++++++++----- .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_6_0/hdmf_common_table.py | 69 +++++++++++++------ .../pydantic/hdmf_common/v1_6_0/namespace.py | 47 ++++++++----- .../hdmf_common/v1_7_0/hdmf_common_base.py | 49 ++++++++----- .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 47 ++++++++----- .../hdmf_common/v1_7_0/hdmf_common_table.py | 69 +++++++++++++------ .../pydantic/hdmf_common/v1_7_0/namespace.py | 47 ++++++++----- .../hdmf_common/v1_8_0/hdmf_common_base.py | 26 ++++++- .../hdmf_common/v1_8_0/hdmf_common_sparse.py | 26 ++++++- .../hdmf_common/v1_8_0/hdmf_common_table.py | 24 ++++++- .../pydantic/hdmf_common/v1_8_0/namespace.py | 26 ++++++- .../v0_1_0/hdmf_experimental_experimental.py | 30 ++++++-- .../v0_1_0/hdmf_experimental_resources.py | 30 ++++++-- .../hdmf_experimental/v0_1_0/namespace.py | 33 +++++++-- .../v0_2_0/hdmf_experimental_experimental.py | 47 ++++++++----- .../v0_2_0/hdmf_experimental_resources.py | 47 ++++++++----- .../hdmf_experimental/v0_2_0/namespace.py | 47 ++++++++----- .../v0_3_0/hdmf_experimental_experimental.py | 47 ++++++++----- .../v0_3_0/hdmf_experimental_resources.py | 47 ++++++++----- .../hdmf_experimental/v0_3_0/namespace.py | 47 ++++++++----- .../v0_4_0/hdmf_experimental_experimental.py | 47 ++++++++----- .../v0_4_0/hdmf_experimental_resources.py | 47 ++++++++----- .../hdmf_experimental/v0_4_0/namespace.py | 47 ++++++++----- .../v0_5_0/hdmf_experimental_experimental.py | 26 ++++++- .../v0_5_0/hdmf_experimental_resources.py | 26 ++++++- .../hdmf_experimental/v0_5_0/namespace.py | 26 ++++++- .../linkml/core/v2_3_0/core.nwb.file.yaml | 52 +++++++------- .../linkml/core/v2_3_0/core.nwb.language.yaml | 3 + .../linkml/core/v2_4_0/core.nwb.file.yaml | 52 +++++++------- .../linkml/core/v2_4_0/core.nwb.language.yaml 
| 3 + .../linkml/core/v2_5_0/core.nwb.file.yaml | 52 +++++++------- .../linkml/core/v2_5_0/core.nwb.language.yaml | 3 + .../core/v2_6_0_alpha/core.nwb.file.yaml | 52 +++++++------- .../core/v2_6_0_alpha/core.nwb.language.yaml | 3 + .../linkml/core/v2_7_0/core.nwb.file.yaml | 52 +++++++------- .../linkml/core/v2_7_0/core.nwb.language.yaml | 3 + .../hdmf_common/v1_4_0/hdmf-common.base.yaml | 18 +++-- .../v1_4_0/hdmf-common.nwb.language.yaml | 3 + .../v1_5_0/hdmf-common.nwb.language.yaml | 5 +- .../hdmf_common/v1_5_1/hdmf-common.base.yaml | 18 +++-- .../v1_5_1/hdmf-common.nwb.language.yaml | 3 + .../hdmf_common/v1_5_1/hdmf-common.table.yaml | 28 ++++++-- .../hdmf_common/v1_6_0/hdmf-common.base.yaml | 18 +++-- .../v1_6_0/hdmf-common.nwb.language.yaml | 3 + .../hdmf_common/v1_6_0/hdmf-common.table.yaml | 28 ++++++-- .../hdmf_common/v1_7_0/hdmf-common.base.yaml | 18 +++-- .../v1_7_0/hdmf-common.nwb.language.yaml | 3 + .../hdmf_common/v1_7_0/hdmf-common.table.yaml | 28 ++++++-- .../v1_8_0/hdmf-common.nwb.language.yaml | 5 +- .../hdmf-experimental.experimental.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 5 +- .../v0_1_0/hdmf-experimental.resources.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 3 + .../hdmf-experimental.nwb.language.yaml | 3 + .../hdmf-experimental.nwb.language.yaml | 3 + .../hdmf-experimental.nwb.language.yaml | 5 +- scripts/generate_core.py | 1 + 223 files changed, 5337 insertions(+), 1113 deletions(-) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py index 0aca06c..534e1d1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, 
RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_0.hdmf_common_table import Container, Data, DynamicTable @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 5136ecd..3b7be04 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import ( 
NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py index c08c6ff..8f90b2e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def 
__getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 42d8653..e8f1dd5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields 
and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 3eef400..f1c7f15 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py index 613b948..acbd7b1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import ( NWBContainer, @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -105,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 
be65250..0117aab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py index 8185f44..f77d91c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import 
BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py index e7e1279..8f354bb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def 
__getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 100fc9b..b6cc006 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 
+93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py index a8f98a2..5703a71 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import ( @@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -104,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, 
val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 718a1c6..af02819 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_0.core_nwb_base import NWBData, NWBDataInterface @@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -96,6 +97,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py 
index 92e7d3e..510f1a8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_0.core_nwb_base import ( Image, @@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -214,6 +214,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py index d92282c..d8d3a5c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, 
Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_2.hdmf_common_table import Container, Data, DynamicTable @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 2c853af..34a1425 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, 
field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py index b1b1b16..154b9e0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class 
ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index f74a7e7..d3aecd0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class 
ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 7f4bb40..7d0c58f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in 
v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py index b203b9b..6fb88f8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import ( NWBContainer, @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -105,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 772841e..dda06d4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py index 5e824fa..b79228e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, 
Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py index e212e58..2de9d2b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ 
class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 81dcca7..030cb10 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: 
Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py index e054ec2..d77e6e8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import ( @@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -104,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack 
those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 0b7b720..76510e5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_1.core_nwb_base import NWBData, NWBDataInterface @@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -96,6 +97,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class 
LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py index 228bdac..c2dec9e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_1.core_nwb_base import ( Image, @@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -214,6 +214,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py index d2b5e5c..4ceb281 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 0935da2..0eb31ab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -9,7 +9,7 @@ 
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py index 4aa5ede..2db19bc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from 
pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index 4336705..fe36215 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_2.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value 
from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 7731ed5..5eb1fae 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_2.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( 
+ cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py index f694d2c..b207a06 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import ( NWBContainer, @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -105,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in 
extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 4858631..6684ca5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_2.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py index 56bc5e9..69265e7 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py index 59564fe..52edd9e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -17,6 +17,7 
@@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_2.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py index 36196b3..3f45f68 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class 
ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py index f2b41df..17bc9b5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_2.core_nwb_base import ( @@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -104,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): 
pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 3f6e756..7371a8e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a 
value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py index b89e5a5..e936b85 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_2.core_nwb_base import ( Image, @@ -162,7 +162,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -217,6 +217,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in 
cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py index b163e24..28eae4b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 5bcab66..cc77ca9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py 
index f370d51..cfd7e71 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index f28980b..37357ce 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from 
pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_4.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py index cb37363..35122ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_4.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or 
"data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py index 8172d22..ad07027 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import ( NWBContainer, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -106,6 +106,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") 
+ @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 4c5a2cf..26d6210 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_4.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del 
v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py index bf08d2b..08fed95 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py index f98882b..e382147 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_4.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 38f8335..64da160 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, 
List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 1199e61..8c88d0e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_4.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, 
description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 0af340d..75a8554 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value 
from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py index 69651fd..f825bfd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_4.core_nwb_base import ( Image, @@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -224,6 +224,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def 
gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py index 3afefe9..b57367d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + 
cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py index f6ae2e5..e7f2cdd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if 
extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py index eec3289..9399bcc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 90b585e..1ae2798 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_5.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 71eeb95..0d907f0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + 
model_validator, ) from ...core.v2_2_5.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py index 6648e37..c280107 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import ( NWBContainer, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each 
object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -106,6 +106,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 403fec4..15354c8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_5.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: 
Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py index 9c670dd..bc724f8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + 
cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py index e73895a..6d59495 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_5.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py index 8c8b746..42b2190 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py index a31241c..feab6d7 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_2_5.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 5d13706..2cd2579 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, 
ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py index 6be8c81..0c925e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_2_5.core_nwb_base import ( Image, @@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for 
each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -224,6 +224,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py index 5f82fcd..ef01b9a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable @@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: 
"""Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -88,6 +88,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py index afb8921..727c35d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class 
ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py index 2fc40f2..d44423e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value 
slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 6af7ff2..109d3e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_3_0.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, 
Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 0a22431..2f539de 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_3_0.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py index 13d67cb..2cc8c0b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, 
Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import ( NWBContainer, @@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -106,6 +106,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -242,6 +264,9 @@ class NWBFile(NWBContainer): description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") + specifications: Optional[dict] = Field( + None, description="""Nested dictionary of schema specifications""" + ) class NWBFileStimulus(ConfiguredBaseModel): @@ -340,10 +365,6 @@ class NWBFileGeneral(ConfiguredBaseModel): None, 
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", ) - lab_meta_data: Optional[Dict[str, LabMetaData]] = Field( - None, - description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", - ) devices: Optional[Dict[str, Device]] = Field( None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", @@ -369,6 +390,10 @@ class NWBFileGeneral(ConfiguredBaseModel): description="""Metadata related to optophysiology.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}}, ) + value: Optional[Dict[str, LabMetaData]] = Field( + None, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) class GeneralSourceScript(ConfiguredBaseModel): @@ -404,12 +429,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel): } }, ) - electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field( - None, description="""Physical group of electrodes.""" - ) electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) + value: Optional[Dict[str, ElectrodeGroup]] = Field( + None, description="""Physical group of electrodes.""" + ) class ExtracellularEphysElectrodes(DynamicTable): @@ -565,12 +590,12 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. 
If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", ) - intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field( - None, description="""An intracellular electrode.""" - ) sweep_table: Optional[SweepTable] = Field( None, description="""The table which groups different PatchClampSeries together.""" ) + value: Optional[Dict[str, IntracellularElectrode]] = Field( + None, description="""An intracellular electrode.""" + ) class NWBFileIntervals(ConfiguredBaseModel): @@ -596,7 +621,7 @@ class NWBFileIntervals(ConfiguredBaseModel): invalid_times: Optional[TimeIntervals] = Field( None, description="""Time intervals that should be removed from analysis.""" ) - time_intervals: Optional[Dict[str, TimeIntervals]] = Field( + value: Optional[Dict[str, TimeIntervals]] = Field( None, description="""Optional additional table(s) for describing other experimental time intervals.""", ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 6094a09..095685f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_3_0.core_nwb_base import ( @@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -107,6 +108,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def 
gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py index 4964397..51e194b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync from ...core.v2_3_0.core_nwb_device import Device @@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -88,6 +88,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a 
value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py index 95576ab..2b98315 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_3_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + 
class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py index e66477e..9c2910f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 6813220..72c78b0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_3_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 67ebc9b..5a4f132 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from 
pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py index c562bb8..6791a67 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_3_0.core_nwb_base import ( Image, @@ -186,7 +186,7 @@ 
class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -241,6 +241,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py index a3f929e..6375215 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +101,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that 
don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 7a46192..ffc7c01 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and 
isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py index 9510135..dc33187 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = 
extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 44503f1..212152e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_4_0.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 1c1b5ef..443ab75 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ 
-17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_4_0.core_nwb_base import TimeSeries @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py index 26bd1fa..2c0dd21 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import ( NWBContainer, @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -114,6 +114,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -250,6 +272,9 @@ class NWBFile(NWBContainer): description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") + specifications: Optional[dict] = Field( + None, description="""Nested dictionary of schema specifications""" + ) class NWBFileStimulus(ConfiguredBaseModel): @@ -348,10 +373,6 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", ) - lab_meta_data: Optional[Dict[str, LabMetaData]] = Field( - None, - description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", - ) devices: 
Optional[Dict[str, Device]] = Field( None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", @@ -377,6 +398,10 @@ class NWBFileGeneral(ConfiguredBaseModel): description="""Metadata related to optophysiology.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}}, ) + value: Optional[Dict[str, LabMetaData]] = Field( + None, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) class GeneralSourceScript(ConfiguredBaseModel): @@ -412,12 +437,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel): } }, ) - electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field( - None, description="""Physical group of electrodes.""" - ) electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) + value: Optional[Dict[str, ElectrodeGroup]] = Field( + None, description="""Physical group of electrodes.""" + ) class ExtracellularEphysElectrodes(DynamicTable): @@ -573,9 +598,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", ) - intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field( - None, description="""An intracellular electrode.""" - ) sweep_table: Optional[SweepTable] = Field( None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. 
Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""", @@ -600,6 +622,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""", ) + value: Optional[Dict[str, IntracellularElectrode]] = Field( + None, description="""An intracellular electrode.""" + ) class NWBFileIntervals(ConfiguredBaseModel): @@ -625,7 +650,7 @@ class NWBFileIntervals(ConfiguredBaseModel): invalid_times: Optional[TimeIntervals] = Field( None, description="""Time intervals that should be removed from analysis.""" ) - time_intervals: Optional[Dict[str, TimeIntervals]] = Field( + value: Optional[Dict[str, TimeIntervals]] = Field( None, description="""Optional additional table(s) for describing other experimental time intervals.""", ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py index d6191fa..1ec7e47 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_4_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def 
gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py index 1ed0f7f..867e901 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync from ...core.v2_4_0.core_nwb_device import Device @@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -88,6 +88,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a 
value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py index 4f4825b..c15bfaf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_4_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + 
class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 64478a5..1b3b008 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 6e69afa..6eb8098 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_4_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index 3024bed..aa95910 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from 
pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py index 2055051..287ae0d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_4_0.core_nwb_base import ( Image, @@ -199,7 +199,7 @@ 
class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -254,6 +254,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py index abb0545..eca9ced 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -112,6 +112,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that 
don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 3df8abe..2b89eab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and 
isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py index 6ac30a8..6af43d3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = 
extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index eec63d7..7521407 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_5_0.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 39d2d4f..72fa554 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ 
-17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_5_0.core_nwb_base import TimeSeriesReferenceVectorData @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py index 338a1cb..b682562 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import ( Images, @@ -60,7 +60,7 @@ class 
ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -115,6 +115,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -251,6 +273,9 @@ class NWBFile(NWBContainer): description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") + specifications: Optional[dict] = Field( + None, description="""Nested dictionary of schema specifications""" + ) class NWBFileStimulus(ConfiguredBaseModel): @@ -351,10 +376,6 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", ) - lab_meta_data: Optional[Dict[str, LabMetaData]] = Field( - None, - description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", - ) devices: 
Optional[Dict[str, Device]] = Field( None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", @@ -380,6 +401,10 @@ class NWBFileGeneral(ConfiguredBaseModel): description="""Metadata related to optophysiology.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}}, ) + value: Optional[Dict[str, LabMetaData]] = Field( + None, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) class GeneralSourceScript(ConfiguredBaseModel): @@ -415,12 +440,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel): } }, ) - electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field( - None, description="""Physical group of electrodes.""" - ) electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) + value: Optional[Dict[str, ElectrodeGroup]] = Field( + None, description="""Physical group of electrodes.""" + ) class ExtracellularEphysElectrodes(DynamicTable): @@ -576,9 +601,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", ) - intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field( - None, description="""An intracellular electrode.""" - ) sweep_table: Optional[SweepTable] = Field( None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. 
Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""", @@ -603,6 +625,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""", ) + value: Optional[Dict[str, IntracellularElectrode]] = Field( + None, description="""An intracellular electrode.""" + ) class NWBFileIntervals(ConfiguredBaseModel): @@ -628,7 +653,7 @@ class NWBFileIntervals(ConfiguredBaseModel): invalid_times: Optional[TimeIntervals] = Field( None, description="""Time intervals that should be removed from analysis.""" ) - time_intervals: Optional[Dict[str, TimeIntervals]] = Field( + value: Optional[Dict[str, TimeIntervals]] = Field( None, description="""Optional additional table(s) for describing other experimental time intervals.""", ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 5628183..3a26054 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_5_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def 
gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py index 4d62c19..ca85b5f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import ( Image, @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -94,6 +94,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == 
"forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py index 13bfb53..bbf932a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_5_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 724bd92..1d5b11d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 78e9c62..ff4af91 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_5_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index f886ffc..6dd0861 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, 
ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py index 08d0b93..597113b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_5_0.core_nwb_base import ( Image, @@ -200,7 +200,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for 
each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -255,6 +255,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 5a1224a..3524f64 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -112,6 +112,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into 
``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index 2abebb7..74fe1cd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, 
val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index 4aa7351..a2f28f3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class 
LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index d3bc1a8..d9c78cd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_6_0_alpha.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index 907f235..c40d820 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_6_0_alpha.core_nwb_base import TimeSeriesReferenceVectorData @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 5ad7185..38cf653 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, 
field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import ( Images, @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -115,6 +115,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -251,6 +273,9 @@ class NWBFile(NWBContainer): description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") + specifications: Optional[dict] = Field( + None, description="""Nested dictionary of schema specifications""" + ) class NWBFileStimulus(ConfiguredBaseModel): @@ -351,10 +376,6 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", ) - lab_meta_data: Optional[Dict[str, LabMetaData]] = Field( - None, - 
description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", - ) devices: Optional[Dict[str, Device]] = Field( None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", @@ -380,6 +401,10 @@ class NWBFileGeneral(ConfiguredBaseModel): description="""Metadata related to optophysiology.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}}, ) + value: Optional[Dict[str, LabMetaData]] = Field( + None, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) class GeneralSourceScript(ConfiguredBaseModel): @@ -415,12 +440,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel): } }, ) - electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field( - None, description="""Physical group of electrodes.""" - ) electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) + value: Optional[Dict[str, ElectrodeGroup]] = Field( + None, description="""Physical group of electrodes.""" + ) class ExtracellularEphysElectrodes(DynamicTable): @@ -576,9 +601,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", ) - intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field( - None, description="""An intracellular electrode.""" - ) sweep_table: Optional[SweepTable] = Field( None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tabels. 
Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""", @@ -603,6 +625,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""", ) + value: Optional[Dict[str, IntracellularElectrode]] = Field( + None, description="""An intracellular electrode.""" + ) class NWBFileIntervals(ConfiguredBaseModel): @@ -628,7 +653,7 @@ class NWBFileIntervals(ConfiguredBaseModel): invalid_times: Optional[TimeIntervals] = Field( None, description="""Time intervals that should be removed from analysis.""" ) - time_intervals: Optional[Dict[str, TimeIntervals]] = Field( + value: Optional[Dict[str, TimeIntervals]] = Field( None, description="""Optional additional table(s) for describing other experimental time intervals.""", ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 84a24e2..40071e1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_6_0_alpha.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + 
@classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index 733114f..f7700ad 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import ( Image, @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -94,6 +94,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into 
``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 43160a9..40579cd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_6_0_alpha.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class 
LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index f3e6a15..1353cf5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index d736ef5..2900441 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_6_0_alpha.core_nwb_base import ( @@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -114,6 +115,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index 334a02e..ef83e27 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as 
np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_6_0_alpha.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py index dd19a19..86a32bf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, 
model_validator from ...core.v2_6_0_alpha.core_nwb_base import ( Image, @@ -202,7 +202,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -257,6 +257,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py index 6031ce0..9d752fe 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -112,6 +112,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + 
@classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py index d0d1919..e95ff25 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import ( NWBDataInterface, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + 
""" + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py index 6d85cf6..f5199c6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import NWBContainer @@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -86,6 +86,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in 
extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index 13b1cad..e5765f2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_7_0.core_nwb_base import ( @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py index d114b30..01fe7f1 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_7_0.core_nwb_base import TimeSeriesReferenceVectorData @@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +102,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py index bcaf029..7dd78cf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import 
BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import ( Images, @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -115,6 +115,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -251,6 +273,9 @@ class NWBFile(NWBContainer): description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") + specifications: Optional[dict] = Field( + None, description="""Nested dictionary of schema specifications""" + ) class NWBFileStimulus(ConfiguredBaseModel): @@ -359,10 +384,6 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", ) - lab_meta_data: Optional[Dict[str, 
LabMetaData]] = Field( - None, - description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", - ) devices: Optional[Dict[str, Device]] = Field( None, description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", @@ -388,6 +409,10 @@ class NWBFileGeneral(ConfiguredBaseModel): description="""Metadata related to optophysiology.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}}, ) + value: Optional[Dict[str, LabMetaData]] = Field( + None, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) class GeneralSourceScript(ConfiguredBaseModel): @@ -423,12 +448,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel): } }, ) - electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field( - None, description="""Physical group of electrodes.""" - ) electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) + value: Optional[Dict[str, ElectrodeGroup]] = Field( + None, description="""Physical group of electrodes.""" + ) class ExtracellularEphysElectrodes(DynamicTable): @@ -584,9 +609,6 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", ) - intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field( - None, description="""An intracellular electrode.""" - ) sweep_table: Optional[SweepTable] = Field( None, description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. 
Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""", @@ -611,6 +633,9 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): None, description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""", ) + value: Optional[Dict[str, IntracellularElectrode]] = Field( + None, description="""An intracellular electrode.""" + ) class NWBFileIntervals(ConfiguredBaseModel): @@ -636,7 +661,7 @@ class NWBFileIntervals(ConfiguredBaseModel): invalid_times: Optional[TimeIntervals] = Field( None, description="""Time intervals that should be removed from analysis.""" ) - time_intervals: Optional[Dict[str, TimeIntervals]] = Field( + value: Optional[Dict[str, TimeIntervals]] = Field( None, description="""Optional additional table(s) for describing other experimental time intervals.""", ) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 937b260..f218898 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_7_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def 
gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py index 331bb3a..9be36c0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import ( Image, @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -94,6 +94,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == 
"forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py index 8c06050..85d8ba5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_7_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -103,6 +104,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py index f2f31a2..f878664 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import ( NWBContainer, @@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -93,6 +93,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py index c842819..3116a14 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( RootModel, ValidationInfo, field_validator, + model_validator, ) from ...core.v2_7_0.core_nwb_base import ( @@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -110,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 0cdfaac..a440c91 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, 
ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import NWBDataInterface @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py index 95df714..1e0d087 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...core.v2_7_0.core_nwb_base import ( Image, @@ -203,7 +203,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for 
each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -258,6 +258,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py index 8b13789..e69de29 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/__init__.py @@ -1 +0,0 @@ - diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index f4811e5..ad32861 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator 
metamodel_version = "None" @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -85,6 +85,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 0e5173a..abb7584 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -109,6 +109,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + 
@classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py index b494ff8..d751e29 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_0.hdmf_common_sparse import ( CSRMatrix, @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +101,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + 
cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index 96a432f..6c16910 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -85,6 +85,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if 
extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index f763497..e7aa06c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -109,6 +109,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py index a7b07c0..d4ed186 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py @@ 
-8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_2.hdmf_common_sparse import ( CSRMatrix, @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +101,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index a702232..28e7b7f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import 
BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -85,6 +85,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index b047088..d2b56a3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -54,7 +54,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is 
not None: return self.value[val] @@ -109,6 +109,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py index d0eb276..89152db 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_1_3.hdmf_common_sparse import ( CSRMatrix, @@ -46,7 +46,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -101,6 +101,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, 
handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py index 62ac415..944bae6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + 
raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py index 2e171cb..30f3337 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, 
Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -30,7 +30,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -52,12 +52,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -68,13 +63,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -86,12 +76,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a 
TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 135e3c6..956138f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise 
ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class 
DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py index 1bdd7c4..9e7cf0f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_2_0.hdmf_common_base import Container, Data from ...hdmf_common.v1_2_0.hdmf_common_sparse import ( @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -67,12 +67,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: 
{cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -83,13 +78,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -101,12 +91,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py index c166f2b..5ae420f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py index d68c966..a10c6e8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_2_1.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return 
handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 1bcfbb6..8e5f858 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a 
TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py index 69a3e11..8b71fd0 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_2_1.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_2_1.hdmf_common_sparse import ( @@ -45,7 +45,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -67,12 +67,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -83,13 +78,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -101,12 +91,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py index ac4722f..055b7d8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and 
self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py index d24f662..f1efea0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except 
Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py index 542738c..c2546ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_3_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique 
UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key 
not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 724cfba..905fd65 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if 
issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - 
description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py index 96107ed..9caab70 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_3_0.hdmf_common_resources import ( @@ -47,7 +47,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -69,12 +69,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -85,13 +80,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - 
f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -103,12 +93,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py index 80ff949..9b140b9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ 
class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in 
cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -156,10 +171,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) # Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py index 2633579..1fd7ffa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_4_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of 
{type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 248a227..fc9e813 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> 
Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py index 5e9f469..d3da688 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix @@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -61,12 +61,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -77,13 +72,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -95,12 +85,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = 
annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 47eee69..f7c84ac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -84,6 +84,28 @@ 
class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index e805809..18b46fc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> 
Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 00fa3ed..1e8961a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -111,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py index 6a2b5ed..962b332 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -95,6 +95,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py index adb45dd..dda9d1b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] 
try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -156,10 +171,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py index 9731e34..68477d5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_1.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + 
except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 68e87e2..046be46 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except 
(IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # 
pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: @@ -996,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) + name: str = Field(...) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - name: str = Field(...) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py index 83568c9..fc5a53c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -62,12 +62,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -78,13 +73,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: 
{cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -96,12 +86,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py index 0ee3b97..2fc6f4d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + 
extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -156,10 +171,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) # Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py index a94523d..f27c4ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_6_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct 
{cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 1fd0a39..c7e473b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py 
+++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack 
those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: @@ -996,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) + name: str = Field(...) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. 
This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - name: str = Field(...) colnames: List[str] = Field( ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py index a8f55a0..f3760c9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -62,12 +62,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: 
{cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -78,13 +73,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -96,12 +86,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py index 08c9fda..9fac65b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -51,12 +51,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -67,13 +62,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -85,12 +75,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, 
**v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -156,10 +171,10 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) + name: str = Field(...) value: Optional[Dict[str, Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) - name: str = Field(...) 
# Model rebuild diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py index a6cd2db..648335c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_7_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + 
except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index a81bd63..ed16d7f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -78,12 +78,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except 
(IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -94,13 +89,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -112,12 +102,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -595,12 +610,19 @@ class DynamicTableMixin(ConfiguredBaseModel): model[key] = to_cast(name=key, description="", value=val) except ValidationError as e: # 
pragma: no cover raise ValidationError.from_exception_data( - title=f"field {key} cannot be cast to VectorData from {val}", + title="cast_extra_columns", line_errors=[ { - "type": "model_type", + "type": "value_error", "input": val, - } + "loc": ("DynamicTableMixin", "cast_extra_columns"), + "ctx": { + "error": ValueError( + f"field {key} cannot be cast to {to_cast} from {val}" + ) + }, + }, + *e.errors(), ], ) from e return model @@ -663,11 +685,14 @@ class DynamicTableMixin(ConfiguredBaseModel): # should pass if we're supposed to be a VectorData column # don't want to override intention here by insisting that it is # *actually* a VectorData column in case an NDArray has been specified for now + description = cls.model_fields[info.field_name].description + description = description if description is not None else "" + return handler( annotation( val, name=info.field_name, - description=cls.model_fields[info.field_name].description, + description=description, ) ) except Exception: @@ -996,10 +1021,14 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) + name: str = Field(...) + categories: List[str] = Field( + ..., + description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""", + ) value: Optional[Dict[str, DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - name: str = Field(...) colnames: List[str] = Field( ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py index ed5ddbe..d698ce2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -62,12 +62,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -78,13 +73,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: 
{cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -96,12 +86,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index d58cc4b..65064da 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" @@ -29,7 +29,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -84,6 +84,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 09428fa..e89c2c7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_8_0.hdmf_common_base import Container @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: 
Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 39b7af0..951260f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -111,6 +111,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in 
cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py index 0e792f9..9d92f34 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix @@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -95,6 +95,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in 
cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index e265d0c..0de98e0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -9,9 +9,9 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData +from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData metamodel_version = "None" @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if 
key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -114,7 +136,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.experimental/", "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.experimental", } ) diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 867c41e..0548377 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -9,9 +9,9 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data +from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data metamodel_version = "None" @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + 
@model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -114,7 +136,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.resources/", "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.resources", } ) diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index d133165..7b0dc01 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -8,12 +8,11 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data, SimpleMultiContainer -from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix -from ...hdmf_common.v1_5_0.hdmf_common_table import ( - AlignedDynamicTable, +from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data, SimpleMultiContainer +from 
...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix +from ...hdmf_common.v1_4_0.hdmf_common_table import ( DynamicTable, DynamicTableRegion, ElementIdentifiers, @@ -49,7 +48,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -104,6 +103,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py index c48bb0b..300fef7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, 
field_validator, model_validator from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, 
+ pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index 0ef8026..936f1aa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - 
f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py index 7be5bf0..14401ff 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -8,7 +8,7 @@ from enum 
import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix @@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -71,12 +71,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -87,13 +82,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -105,12 +95,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = 
annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py index 894b2ab..19e0e47 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: 
return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index 81be11e..6321d50 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - 
f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py index fcc1422..9ad600b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix @@ 
-49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -71,12 +71,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -87,13 +82,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -105,12 +95,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + 
and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py index 6c9d22b..f1bd5a5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") 
@classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index 8f68ec9..4c3c8c0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -9,7 +9,7 @@ from typing import 
Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -54,12 +54,7 @@ class ConfiguredBaseModel(BaseModel): try: return handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -70,13 +65,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -88,12 +78,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except 
TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py index c79550d..651428d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -72,12 +72,7 @@ class ConfiguredBaseModel(BaseModel): try: return 
handler(v["value"]) except (IndexError, KeyError, TypeError): - raise ValueError( - f"coerce_value: Could not use the value field of {type(v)} " - f"to construct {cls.__name__}.{info.field_name}, " - f"expected type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + raise e1 @field_validator("*", mode="wrap") @classmethod @@ -88,13 +83,8 @@ class ConfiguredBaseModel(BaseModel): except Exception as e1: try: return handler({"value": v}) - except Exception as e2: - raise ValueError( - f"cast_with_value: Could not cast {type(v)} as value field for " - f"{cls.__name__}.{info.field_name}," - f" expected_type: {cls.model_fields[info.field_name].annotation}\n" - f"inner error: {str(e1)}" - ) from e1 + except Exception: + raise e1 @field_validator("*", mode="before") @classmethod @@ -106,12 +96,37 @@ class ConfiguredBaseModel(BaseModel): annotation = annotation.__args__[0] try: if issubclass(annotation, type(v)) and annotation is not type(v): - v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + if v.__pydantic_extra__: + v = annotation(**{**v.__dict__, **v.__pydantic_extra__}) + else: + v = annotation(**v.__dict__) except TypeError: # fine, annotation is a non-class type like a TypeVar pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 368d037..259d724 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index 8402336..c2267b3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data @@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -87,6 +87,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py index de810ad..e100b9d 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -8,7 +8,7 @@ from enum import Enum from typing import Any, ClassVar, Dict, List, Literal, Optional, Union import numpy as np -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data, SimpleMultiContainer from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix @@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, val: Union[int, slice]) -> Any: + def __getitem__(self, val: Union[int, slice, str]) -> Any: """Try and get a value from value or "data" if we have it""" if hasattr(self, "value") and self.value is not None: return self.value[val] @@ -105,6 +105,28 @@ class ConfiguredBaseModel(BaseModel): pass return v + @model_validator(mode="before") + @classmethod + def gather_extra_to_value(cls, v: Any, handler) -> Any: + """ + For classes that don't allow extra fields and have a value slot, + pack those extra kwargs into ``value`` + """ + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): + extras = {key: val for key, val in v.items() if key not in cls.model_fields} + if extras: + for k in extras: + del v[k] + if "value" in v: + v["value"].update(extras) + else: + v["value"] = extras + return v + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml index be74485..e998eab 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.file.yaml @@ -219,6 +219,10 @@ classes: range: Units inlined: true inlined_as_list: false + specifications: + name: specifications + description: Nested dictionary of schema specifications + range: dict tree_root: true NWBFile__stimulus: name: NWBFile__stimulus @@ -372,14 +376,6 @@ classes: description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - lab_meta_data: - name: lab_meta_data - description: Place-holder than can be extended so that lab-specific meta-data - can be placed in /general. - range: LabMetaData - multivalued: true - inlined: true - inlined_as_list: false devices: name: devices description: Description of hardware devices used during experiment, e.g., @@ -424,6 +420,14 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane + value: + name: value + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + range: LabMetaData + multivalued: true + inlined: true + inlined_as_list: false general__source_script: name: general__source_script description: Script file or link to public source code used to create this NWB @@ -456,19 +460,19 @@ classes: range: string required: true equals_string: extracellular_ephys - electrode_group: - name: electrode_group - description: Physical group of electrodes. - range: ElectrodeGroup - multivalued: true - inlined: true - inlined_as_list: false electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes inlined: true inlined_as_list: true + value: + name: value + description: Physical group of electrodes. + range: ElectrodeGroup + multivalued: true + inlined: true + inlined_as_list: false extracellular_ephys__electrodes: name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. 
channels) used for recording. @@ -610,19 +614,19 @@ classes: frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries. range: text - intracellular_electrode: - name: intracellular_electrode - description: An intracellular electrode. - range: IntracellularElectrode - multivalued: true - inlined: true - inlined_as_list: false sweep_table: name: sweep_table description: The table which groups different PatchClampSeries together. range: SweepTable inlined: true inlined_as_list: false + value: + name: value + description: An intracellular electrode. + range: IntracellularElectrode + multivalued: true + inlined: true + inlined_as_list: false NWBFile__intervals: name: NWBFile__intervals description: Experimental intervals, whether that be logically distinct sub-experiments @@ -655,8 +659,8 @@ classes: range: TimeIntervals inlined: true inlined_as_list: false - time_intervals: - name: time_intervals + value: + name: value description: Optional additional table(s) for describing other experimental time intervals. 
range: TimeIntervals diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml index a921651..e36f824 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_3_0/core.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml index f11e44b..5157c95 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.file.yaml @@ -219,6 +219,10 @@ classes: range: Units inlined: true inlined_as_list: false + specifications: + name: specifications + description: Nested dictionary of schema specifications + range: dict tree_root: true NWBFile__stimulus: name: NWBFile__stimulus @@ -372,14 +376,6 @@ classes: description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - lab_meta_data: - name: lab_meta_data - description: Place-holder than can be extended so that lab-specific meta-data - can be placed in /general. - range: LabMetaData - multivalued: true - inlined: true - inlined_as_list: false devices: name: devices description: Description of hardware devices used during experiment, e.g., @@ -424,6 +420,14 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane + value: + name: value + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. 
+ range: LabMetaData + multivalued: true + inlined: true + inlined_as_list: false general__source_script: name: general__source_script description: Script file or link to public source code used to create this NWB @@ -456,19 +460,19 @@ classes: range: string required: true equals_string: extracellular_ephys - electrode_group: - name: electrode_group - description: Physical group of electrodes. - range: ElectrodeGroup - multivalued: true - inlined: true - inlined_as_list: false electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes inlined: true inlined_as_list: true + value: + name: value + description: Physical group of electrodes. + range: ElectrodeGroup + multivalued: true + inlined: true + inlined_as_list: false extracellular_ephys__electrodes: name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. @@ -611,13 +615,6 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - intracellular_electrode: - name: intracellular_electrode - description: An intracellular electrode. - range: IntracellularElectrode - multivalued: true - inlined: true - inlined_as_list: false sweep_table: name: sweep_table description: '[DEPRECATED] Table used to group different PatchClampSeries. @@ -677,6 +674,13 @@ classes: range: ExperimentalConditionsTable inlined: true inlined_as_list: false + value: + name: value + description: An intracellular electrode. 
+ range: IntracellularElectrode + multivalued: true + inlined: true + inlined_as_list: false NWBFile__intervals: name: NWBFile__intervals description: Experimental intervals, whether that be logically distinct sub-experiments @@ -709,8 +713,8 @@ classes: range: TimeIntervals inlined: true inlined_as_list: false - time_intervals: - name: time_intervals + value: + name: value description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml index a921651..e36f824 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_4_0/core.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml index e42c8b3..82f7932 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.file.yaml @@ -219,6 +219,10 @@ classes: range: Units inlined: true inlined_as_list: false + specifications: + name: specifications + description: Nested dictionary of schema specifications + range: dict tree_root: true NWBFile__stimulus: name: NWBFile__stimulus @@ -373,14 +377,6 @@ classes: description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - lab_meta_data: - name: lab_meta_data - description: Place-holder than can be extended so that lab-specific meta-data - can be placed in /general. 
- range: LabMetaData - multivalued: true - inlined: true - inlined_as_list: false devices: name: devices description: Description of hardware devices used during experiment, e.g., @@ -425,6 +421,14 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane + value: + name: value + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + range: LabMetaData + multivalued: true + inlined: true + inlined_as_list: false general__source_script: name: general__source_script description: Script file or link to public source code used to create this NWB @@ -457,19 +461,19 @@ classes: range: string required: true equals_string: extracellular_ephys - electrode_group: - name: electrode_group - description: Physical group of electrodes. - range: ElectrodeGroup - multivalued: true - inlined: true - inlined_as_list: false electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes inlined: true inlined_as_list: true + value: + name: value + description: Physical group of electrodes. + range: ElectrodeGroup + multivalued: true + inlined: true + inlined_as_list: false extracellular_ephys__electrodes: name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. @@ -614,13 +618,6 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - intracellular_electrode: - name: intracellular_electrode - description: An intracellular electrode. - range: IntracellularElectrode - multivalued: true - inlined: true - inlined_as_list: false sweep_table: name: sweep_table description: '[DEPRECATED] Table used to group different PatchClampSeries. @@ -680,6 +677,13 @@ classes: range: ExperimentalConditionsTable inlined: true inlined_as_list: false + value: + name: value + description: An intracellular electrode. 
+ range: IntracellularElectrode + multivalued: true + inlined: true + inlined_as_list: false NWBFile__intervals: name: NWBFile__intervals description: Experimental intervals, whether that be logically distinct sub-experiments @@ -712,8 +716,8 @@ classes: range: TimeIntervals inlined: true inlined_as_list: false - time_intervals: - name: time_intervals + value: + name: value description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml index a921651..e36f824 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_5_0/core.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml index 1a0cf34..85b1f65 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml @@ -219,6 +219,10 @@ classes: range: Units inlined: true inlined_as_list: false + specifications: + name: specifications + description: Nested dictionary of schema specifications + range: dict tree_root: true NWBFile__stimulus: name: NWBFile__stimulus @@ -373,14 +377,6 @@ classes: description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - lab_meta_data: - name: lab_meta_data - description: Place-holder than can be extended so that lab-specific meta-data - can be placed in /general. 
- range: LabMetaData - multivalued: true - inlined: true - inlined_as_list: false devices: name: devices description: Description of hardware devices used during experiment, e.g., @@ -425,6 +421,14 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane + value: + name: value + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + range: LabMetaData + multivalued: true + inlined: true + inlined_as_list: false general__source_script: name: general__source_script description: Script file or link to public source code used to create this NWB @@ -457,19 +461,19 @@ classes: range: string required: true equals_string: extracellular_ephys - electrode_group: - name: electrode_group - description: Physical group of electrodes. - range: ElectrodeGroup - multivalued: true - inlined: true - inlined_as_list: false electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes inlined: true inlined_as_list: true + value: + name: value + description: Physical group of electrodes. + range: ElectrodeGroup + multivalued: true + inlined: true + inlined_as_list: false extracellular_ephys__electrodes: name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. @@ -614,13 +618,6 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - intracellular_electrode: - name: intracellular_electrode - description: An intracellular electrode. - range: IntracellularElectrode - multivalued: true - inlined: true - inlined_as_list: false sweep_table: name: sweep_table description: '[DEPRECATED] Table used to group different PatchClampSeries. @@ -680,6 +677,13 @@ classes: range: ExperimentalConditionsTable inlined: true inlined_as_list: false + value: + name: value + description: An intracellular electrode. 
+ range: IntracellularElectrode + multivalued: true + inlined: true + inlined_as_list: false NWBFile__intervals: name: NWBFile__intervals description: Experimental intervals, whether that be logically distinct sub-experiments @@ -712,8 +716,8 @@ classes: range: TimeIntervals inlined: true inlined_as_list: false - time_intervals: - name: time_intervals + value: + name: value description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml index a921651..e36f824 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml index fcd10dc..35f877e 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -219,6 +219,10 @@ classes: range: Units inlined: true inlined_as_list: false + specifications: + name: specifications + description: Nested dictionary of schema specifications + range: dict tree_root: true NWBFile__stimulus: name: NWBFile__stimulus @@ -375,14 +379,6 @@ classes: description: Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. range: text - lab_meta_data: - name: lab_meta_data - description: Place-holder than can be extended so that lab-specific meta-data - can be placed in /general. 
- range: LabMetaData - multivalued: true - inlined: true - inlined_as_list: false devices: name: devices description: Description of hardware devices used during experiment, e.g., @@ -427,6 +423,14 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane + value: + name: value + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + range: LabMetaData + multivalued: true + inlined: true + inlined_as_list: false general__source_script: name: general__source_script description: Script file or link to public source code used to create this NWB @@ -459,19 +463,19 @@ classes: range: string required: true equals_string: extracellular_ephys - electrode_group: - name: electrode_group - description: Physical group of electrodes. - range: ElectrodeGroup - multivalued: true - inlined: true - inlined_as_list: false electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. range: extracellular_ephys__electrodes inlined: true inlined_as_list: true + value: + name: value + description: Physical group of electrodes. + range: ElectrodeGroup + multivalued: true + inlined: true + inlined_as_list: false extracellular_ephys__electrodes: name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. @@ -616,13 +620,6 @@ classes: etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.' range: text - intracellular_electrode: - name: intracellular_electrode - description: An intracellular electrode. - range: IntracellularElectrode - multivalued: true - inlined: true - inlined_as_list: false sweep_table: name: sweep_table description: '[DEPRECATED] Table used to group different PatchClampSeries. @@ -682,6 +679,13 @@ classes: range: ExperimentalConditionsTable inlined: true inlined_as_list: false + value: + name: value + description: An intracellular electrode. 
+ range: IntracellularElectrode + multivalued: true + inlined: true + inlined_as_list: false NWBFile__intervals: name: NWBFile__intervals description: Experimental intervals, whether that be logically distinct sub-experiments @@ -714,8 +718,8 @@ classes: range: TimeIntervals inlined: true inlined_as_list: false - time_intervals: - name: time_intervals + value: + name: value description: Optional additional table(s) for describing other experimental time intervals. range: TimeIntervals diff --git a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml index a921651..e36f824 100644 --- a/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/core/v2_7_0/core.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml index 6ba8106..5f37d89 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. 
is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml index 9b6bc55..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml index 1842589..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml index 4fd80e6..4173cbc 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml +++ 
b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml index 9b6bc55..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml index 513a5d4..44a3b23 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml @@ -189,10 +189,26 @@ classes: by a separate DynamicTable stored within the group. is_a: DynamicTable attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. 
+ This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. + range: text + required: true + multivalued: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml index beb539c..5b2a0ef 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml index 9b6bc55..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml index 5613666..d53e72f 100644 --- 
a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml @@ -189,10 +189,26 @@ classes: by a separate DynamicTable stored within the group. is_a: DynamicTable attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. + This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. + range: text + required: true + multivalued: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml index f65f22b..652ffad 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml @@ -38,10 +38,16 @@ classes: description: A simple Container for holding onto multiple containers. 
is_a: Container attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container + name: + name: name + identifier: true + range: string + required: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml index 9b6bc55..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml index 36dd411..274356e 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml @@ -189,10 +189,26 @@ classes: by a separate DynamicTable stored within the group. is_a: DynamicTable attributes: - - name: value - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable + name: + name: name + identifier: true + range: string + required: true + categories: + name: categories + description: The names of the categories in this AlignedDynamicTable. Each + category is represented by one DynamicTable stored in the parent group. + This attribute should be used to specify an order of categories and the + category names must match the names of the corresponding DynamicTable in + the group. 
+ range: text + required: true + multivalued: true + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable tree_root: true diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml index 1842589..6b8ce10 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index b0b87d5..2a10ba2 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.experimental version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.experimental/ classes: diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml index 650c484..dcf2549 100644 --- 
a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index 9aeb7d0..a8d955d 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.resources version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.resources/ classes: diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml index b487163..dcf2549 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml 
b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml index b487163..dcf2549 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml index b487163..dcf2549 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml index 650c484..dcf2549 100644 --- a/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_models/src/nwb_models/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -88,6 +88,9 @@ types: isodatetime: name: isodatetime typeof: datetime + dict: + name: dict + repr: dict classes: AnyType: name: AnyType diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 
413b85b..53a7574 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -19,6 +19,7 @@ from nwb_linkml.providers import LinkMLProvider, PydanticProvider from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo from nwb_linkml.io import schema as io + def make_tmp_dir(clear: bool = False) -> Path: # use a directory underneath this one as the temporary directory rather than # the default hidden one From a6ab1cf61b11818db4a9a3daba6eb46192b41540 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:40:46 -0700 Subject: [PATCH 14/18] regenerate models --- nwb_linkml/src/nwb_linkml/adapters/namespaces.py | 2 +- nwb_linkml/src/nwb_linkml/generators/pydantic.py | 4 ++-- nwb_linkml/src/nwb_linkml/includes/base.py | 8 ++++++-- .../models/pydantic/core/v2_2_0/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/namespace.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_image.py | 2 
+- .../models/pydantic/core/v2_2_1/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/namespace.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/namespace.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/namespace.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_device.py | 2 +- 
.../models/pydantic/core/v2_2_5/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/namespace.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/namespace.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_ophys.py | 2 +- 
.../models/pydantic/core/v2_4_0/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/namespace.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/namespace.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_base.py | 2 +- .../pydantic/core/v2_6_0_alpha/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py | 2 +- .../pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/namespace.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_epoch.py | 2 +- 
.../models/pydantic/core/v2_7_0/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_7_0/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_1_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_1_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_1_2/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_1_2/namespace.py | 2 +- .../pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_1_3/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_1_3/namespace.py | 2 +- .../pydantic/hdmf_common/v1_2_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_2_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_2_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_2_1/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_2_1/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_2_1/namespace.py | 2 +- .../pydantic/hdmf_common/v1_3_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py | 2 +- .../pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_3_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_4_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_4_0/hdmf_common_table.py | 2 +- 
.../models/pydantic/hdmf_common/v1_4_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_5_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_5_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_5_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_5_1/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_5_1/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_5_1/namespace.py | 2 +- .../pydantic/hdmf_common/v1_6_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_6_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_6_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_7_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_7_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_7_0/namespace.py | 2 +- .../pydantic/hdmf_common/v1_8_0/hdmf_common_base.py | 2 +- .../pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py | 2 +- .../pydantic/hdmf_common/v1_8_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_8_0/namespace.py | 2 +- .../v0_1_0/hdmf_experimental_experimental.py | 2 +- .../v0_1_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_1_0/namespace.py | 2 +- .../v0_2_0/hdmf_experimental_experimental.py | 2 +- .../v0_2_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_2_0/namespace.py | 2 +- .../v0_3_0/hdmf_experimental_experimental.py | 2 +- .../v0_3_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_3_0/namespace.py | 2 +- .../v0_4_0/hdmf_experimental_experimental.py | 2 +- .../v0_4_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_4_0/namespace.py | 2 +- .../v0_5_0/hdmf_experimental_experimental.py | 2 +- 
.../v0_5_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_5_0/namespace.py | 2 +- 194 files changed, 200 insertions(+), 196 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 6aa68ad..76d1835 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -198,7 +198,7 @@ class NamespacesAdapter(Adapter): for i, parent in enumerate(parents): # we want a full roll-down of all the ancestor classes, # but we make an abbreviated leaf class - complete = False if i == len(parents) - 1 else True + complete = i != len(parents) - 1 new_cls = roll_down_nwb_class(new_cls, parent, complete=complete) new_cls: Group | Dataset = type(cls)(**new_cls) new_cls.parent = cls.parent diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 4b3d412..659bd2b 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -15,7 +15,7 @@ from linkml.generators import PydanticGenerator from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray from linkml.generators.pydanticgen.build import ClassResult, SlotResult from linkml.generators.pydanticgen.pydanticgen import SplitMode -from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule, ObjectImport +from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport, PydanticModule from linkml_runtime.linkml_model.meta import ( ArrayExpression, SchemaDefinition, @@ -29,8 +29,8 @@ from nwb_linkml.includes.base import ( BASEMODEL_CAST_WITH_VALUE, BASEMODEL_COERCE_CHILD, BASEMODEL_COERCE_VALUE, - BASEMODEL_GETITEM, BASEMODEL_EXTRA_TO_VALUE, + BASEMODEL_GETITEM, ) from nwb_linkml.includes.hdmf import ( DYNAMIC_TABLE_IMPORTS, diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py 
b/nwb_linkml/src/nwb_linkml/includes/base.py index 6cad4a3..8efb70b 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -68,12 +68,16 @@ BASEMODEL_COERCE_CHILD = """ BASEMODEL_EXTRA_TO_VALUE = """ @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: \"\"\" For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` \"\"\" - if cls.model_config["extra"] == "forbid" and "value" in cls.model_fields and isinstance(v, dict): + if ( + cls.model_config["extra"] == "forbid" + and "value" in cls.model_fields + and isinstance(v, dict) + ): extras = {key:val for key,val in v.items() if key not in cls.model_fields} if extras: for k in extras: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py index 534e1d1..99bd58a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 3b7be04..675d43e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, 
handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py index 8f90b2e..842582e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index e8f1dd5..3fcf31c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py index f1c7f15..d6f68ac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, 
handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py index acbd7b1..07e39f6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 0117aab..6b9244d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py index f77d91c..45bb785 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) 
-> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py index 8f354bb..965c725 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py index b6cc006..622892d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 5703a71..91959e3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index af02819..60df84f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -99,7 +99,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py index 510f1a8..9c30f67 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py @@ -216,7 +216,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py index d8d3a5c..a890899 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 34a1425..68a46bd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py index 154b9e0..10c6d89 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index d3aecd0..16d95d3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> 
Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 7d0c58f..90da274 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py index 6fb88f8..b7883ce 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py index dda06d4..01d99d3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + 
def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py index b79228e..62f934a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py index 2de9d2b..06dc024 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 030cb10..c91e213 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py index d77e6e8..ea27fb7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 76510e5..a825a37 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -99,7 +99,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py index c2dec9e..46dd45c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py @@ -216,7 +216,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py index 4ceb281..e52df8a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 0eb31ab..f721a5a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py index 2db19bc..5704067 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index fe36215..5b940a1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 5eb1fae..3a19363 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py index b207a06..dfc2a4f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 6684ca5..bb4ae6f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py index 69265e7..bd5b8a7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py index 52edd9e..e50516b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py index 3f45f68..5d47b5a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py index 17bc9b5..38df5a8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 7371a8e..eed7fc3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + 
def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py index e936b85..d66e221 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py @@ -219,7 +219,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py index 28eae4b..b213daa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py index cc77ca9..43b5e9a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py index cfd7e71..66fa713 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index 37357ce..bd43aa1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 35122ba..9f56da5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py index ad07027..a6b639e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -108,7 +108,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 26d6210..469cf8f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py index 08fed95..1dab10d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py index e382147..13c5073 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 64da160..a9bc385 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 8c88d0e..afa1762 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 75a8554..6368785 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py index f825bfd..5953b5c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py @@ -226,7 +226,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py index b57367d..0b42841 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py index e7f2cdd..161f397 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py index 9399bcc..2b88622 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 1ae2798..32fd648 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> 
Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 0d907f0..c9ac1c9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py index c280107..55053b9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -108,7 +108,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 15354c8..998394c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + 
def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py index bc724f8..9eaf4a7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py index 6d59495..376cccf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py index 42b2190..d6401b0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py index feab6d7..53ce535 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 2cd2579..1f4bf8f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py index 0c925e7..b9280bb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py @@ -226,7 +226,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py index ef01b9a..1fefda7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -90,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py index 727c35d..c2d0ae3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py index d44423e..0cbb7d1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 109d3e7..5b6e43e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 2f539de..c448557 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py index 2cc8c0b..c89c787 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -108,7 +108,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 095685f..023c7f2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -110,7 +110,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py index 51e194b..57964f8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -90,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py index 2b98315..bfdee66 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py index 9c2910f..5c7bb29 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 72c78b0..18fdbdf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 5a4f132..6c4b38d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + 
def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py index 6791a67..115672a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py @@ -243,7 +243,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py index 6375215..337f9ab 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -103,7 +103,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py index ffc7c01..f0d616a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py index dc33187..acb4357 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 212152e..b122644 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 443ab75..7d21b94 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py index 2c0dd21..5c49ad0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -116,7 +116,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 1ec7e47..ba89f45 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py index 867e901..3ae1c76 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -90,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py index c15bfaf..d7fd27e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 1b3b008..c631b8b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 6eb8098..99f01c8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index aa95910..be229ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py index 287ae0d..4ff416d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py @@ -256,7 +256,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py index eca9ced..0bca23b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -114,7 +114,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 2b89eab..f6aa28a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py index 6af43d3..981b650 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 7521407..638cc74 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> 
Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 72fa554..d4bbff5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py index b682562..46996e4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -117,7 +117,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 3a26054..521a9ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + 
def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py index ca85b5f..778aff2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -96,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py index bbf932a..5a482d7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 1d5b11d..2f98119 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py index ff4af91..59d16ba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 6dd0861..7f2c8e1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py index 597113b..fc69cfa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py @@ -257,7 +257,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def 
gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 3524f64..9bd14b4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -114,7 +114,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index 74fe1cd..ff83b67 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index a2f28f3..f59045a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") 
@classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index d9c78cd..3158ed1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index c40d820..f58fa82 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 38cf653..b128e6d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -117,7 +117,7 @@ 
class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 40071e1..f134579 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index f7700ad..bbc8b6f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -96,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 40579cd..48dcc78 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 1353cf5..47f364e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index 2900441..ded326f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -117,7 +117,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index ef83e27..d66750d 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py index 86a32bf..fa08cde 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -259,7 +259,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py index 9d752fe..c081f4a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -114,7 +114,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py index 
e95ff25..4eda0f3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py index f5199c6..4690c0f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -88,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index e5765f2..434466a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py 
index 01fe7f1..24830d7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py index 7dd78cf..cb1a3eb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -117,7 +117,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py index f218898..3234820 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py index 
9be36c0..49898d6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -96,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py index 85d8ba5..14615eb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py index f878664..ca48d75 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -95,7 +95,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 3116a14..2156655 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index a440c91..880636c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py index 1e0d087..154f9b6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py @@ -260,7 +260,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index 
ad32861..cce2205 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index abb7584..072d163 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -111,7 +111,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py index d751e29..6cf0338 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py @@ -103,7 +103,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index 6c16910..d2f5b57 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index e7aa06c..6093ac5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -111,7 +111,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py index d4ed186..4494201 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py @@ -103,7 +103,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index 28e7b7f..dddcb72 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index d2b56a3..5905a23 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -111,7 +111,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py index 89152db..4ab0cc3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py @@ -103,7 +103,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra 
fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py index 944bae6..bb0f889 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py index 30f3337..57813a3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 956138f..0bb0467 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, 
handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py index 9e7cf0f..c487cad 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -102,7 +102,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py index 5ae420f..211589e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py index a10c6e8..8c719c9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): 
@model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 8e5f858..122d3d3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py index 8b71fd0..f31f4aa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -102,7 +102,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py index 055b7d8..66f655c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py index f1efea0..7f85a2d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py index c2546ba..9d7d2b8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py 
index 905fd65..470c9e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py index 9caab70..dc608ea 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -104,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py index 9b140b9..bf2e62d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py index 1fd7ffa..038d9e3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index fc9e813..7f094be 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py index d3da688..fa75d95 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -96,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index f7c84ac..5331a77 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index 18b46fc..5e8bdf2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 1e8961a..e159c68 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For 
classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py index 962b332..a4c98ce 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -97,7 +97,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py index dda9d1b..842c43c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py index 68477d5..a464f6f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, 
handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 046be46..e348fae 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py index fc5a53c..6f39f3d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -97,7 +97,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py index 2fc6f4d..d34d545 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): 
@model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py index f27c4ba..960dbca 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index c7e473b..bb7a801 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py index f3760c9..455af25 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -97,7 +97,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py index 9fac65b..8601653 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py index 648335c..ae6311b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index ed16d7f..50d610a 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py index d698ce2..ea23e7a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -97,7 +97,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index 65064da..af5d968 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index e89c2c7..172fa26 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 951260f..d477089 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -113,7 +113,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py index 9d92f34..9676686 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -97,7 +97,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 0de98e0..b494547 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 0548377..2c2d524 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 7b0dc01..0655c54 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -105,7 +105,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") 
@classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py index 300fef7..fd0eba4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index 936f1aa..97c423f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py index 14401ff..9cb1518 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py index 19e0e47..641dc7e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index 6321d50..c57c710 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those 
extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py index 9ad600b..3abf7d1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -106,7 +106,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py index f1bd5a5..ba3e42d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index 4c3c8c0..42d30a5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -89,7 +89,7 @@ class 
ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py index 651428d..c0ecd43 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 259d724..8ec11c3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index c2267b3..6493a27 
100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -89,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py index e100b9d..160f875 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -107,7 +107,7 @@ class ConfiguredBaseModel(BaseModel): @model_validator(mode="before") @classmethod - def gather_extra_to_value(cls, v: Any, handler) -> Any: + def gather_extra_to_value(cls, v: Any) -> Any: """ For classes that don't allow extra fields and have a value slot, pack those extra kwargs into ``value`` From 58affeb5db2784528d9263240a5e88a9846ada26 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:49:29 -0700 Subject: [PATCH 15/18] regenerate models --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 2 +- .../models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py | 2 +- 
.../models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py | 2 +- .../models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index df73d68..3d456d0 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -386,7 +386,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 072d163..6e9755e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -677,7 +677,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 6093ac5..416c28c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -677,7 +677,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 5905a23..aff7d58 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -677,7 +677,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 0bb0467..96a4328 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 122d3d3..52d057e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 470c9e7..9a305bd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 7f094be..5ab6a67 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: 
annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index e159c68..cce6b51 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index e348fae..08b3530 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index bb7a801..ce357b4 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 50d610a..d94c2b7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should pass if we're supposed to be a VectorData column diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index d477089..f47733e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -679,7 +679,7 @@ class DynamicTableMixin(ConfiguredBaseModel): return handler(val) except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation - if type(annotation).__name__ == "_UnionGenericAlias": + while hasattr(annotation, "__args__"): annotation = annotation.__args__[0] try: # should 
pass if we're supposed to be a VectorData column From dfb2f3701bb33dda39cd3e502955d77a17a250f3 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:50:25 -0700 Subject: [PATCH 16/18] codespell got me again! --- nwb_linkml/src/nwb_linkml/includes/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py index 8efb70b..4747f57 100644 --- a/nwb_linkml/src/nwb_linkml/includes/base.py +++ b/nwb_linkml/src/nwb_linkml/includes/base.py @@ -34,7 +34,7 @@ BASEMODEL_CAST_WITH_VALUE = """ @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - \"\"\"Try to rescue instantiation by casting into the model's value fiel\"\"\" + \"\"\"Try to rescue instantiation by casting into the model's value field\"\"\" try: return handler(v) except Exception as e1: From 8c76ce82c34eae559cf9a0c69f1cc757645b6a03 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 26 Sep 2024 01:59:45 -0700 Subject: [PATCH 17/18] one last model generation --- .../src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_0/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_0/namespace.py | 2 
+- .../src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_1/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_1/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_2/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_2/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py | 2 +- 
.../nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_4/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_4/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_2_5/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_2_5/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py | 2 
+- .../src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_3_0/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_3_0/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_4_0/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_4_0/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py 
| 2 +- .../src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_5_0/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_5_0/namespace.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_base.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_device.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_file.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_image.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py | 2 +- .../models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py | 2 +- .../nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py | 2 +- .../nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py | 2 +- 
.../models/pydantic/core/v2_7_0/core_nwb_retinotopy.py | 2 +- .../src/nwb_models/models/pydantic/core/v2_7_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py | 2 +- 
.../models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py | 2 +- .../models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py | 2 +- .../models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py | 2 +- .../models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py | 2 +- .../nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py | 2 +- .../hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py | 2 +- .../hdmf_experimental/v0_1_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_1_0/namespace.py | 2 +- .../hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py | 2 +- .../hdmf_experimental/v0_2_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_2_0/namespace.py | 2 +- .../hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py | 2 +- .../hdmf_experimental/v0_3_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_3_0/namespace.py | 2 +- .../hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py | 2 +- 
.../hdmf_experimental/v0_4_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_4_0/namespace.py | 2 +- .../hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py | 2 +- .../hdmf_experimental/v0_5_0/hdmf_experimental_resources.py | 2 +- .../models/pydantic/hdmf_experimental/v0_5_0/namespace.py | 2 +- 191 files changed, 191 insertions(+), 191 deletions(-) diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py index 99bd58a..b1c670c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 675d43e..5a63426 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py index 842582e..bdd469e 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 3fcf31c..a3d315d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py index d6f68ac..2a4db9b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py index 07e39f6..5cc40db 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 6b9244d..e4213de 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py index 45bb785..a42cad3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue 
instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py index 965c725..021de7b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 622892d..44f7e6a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 91959e3..eda0cf9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) 
-> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 60df84f..3031e3c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -69,7 +69,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py index 9c30f67..805e58c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_0/namespace.py @@ -186,7 +186,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py index a890899..044db0d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 68a46bd..aaf0f41 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py index 10c6d89..3ad7dd1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index 16d95d3..4d16d36 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 90da274..ce0dbfd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py index b7883ce..49a1846 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 01d99d3..19b2f81 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py index 62f934a..990cc6a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py index 06dc024..2249720 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue 
instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py index c91e213..64649f4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py index ea27fb7..deecf64 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index a825a37..dd8a84e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -69,7 +69,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, 
v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py index 46dd45c..cbab2e1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_1/namespace.py @@ -186,7 +186,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py index e52df8a..bce6112 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py index f721a5a..1536adb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -65,7 +65,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py index 5704067..de4bd04 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index 5b940a1..22e0ff7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 3a19363..d599a8c 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py index dfc2a4f..8b5d38b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py index bb4ae6f..464dcc8 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py index bd5b8a7..b88c1b1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py index e50516b..e12bfb2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py index 5d47b5a..7bdc063 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation 
by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py index 38df5a8..2ac1358 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index eed7fc3..577cd33 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py index d66e221..9dbeca9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_2/namespace.py @@ -189,7 +189,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> 
Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py index b213daa..8c5293b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 43b5e9a..0775e8f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py index 66fa713..63b8926 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -58,7 +58,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index bd43aa1..0db2758 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 9f56da5..7ed9c06 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py index a6b639e..32f00ff 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -78,7 +78,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 469cf8f..0a4bf27 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py index 1dab10d..f0fb808 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py index 13c5073..65b6dc0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py index a9bc385..af170bd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py index afa1762..91a59a0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation 
by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 6368785..ba2cc17 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py index 5953b5c..0950468 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_4/namespace.py @@ -196,7 +196,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py index 0b42841..36e9a95 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: 
- """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 161f397..f15481e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py index 2b88622..124d12d 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 32fd648..39d0aad 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py index c9ac1c9..d610d4a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py index 55053b9..fdcd4c9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -78,7 +78,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 998394c..3c878c5 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py index 9eaf4a7..79f339a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py index 376cccf..969c646 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py index d6401b0..763c7f3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 53ce535..b106054 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 1f4bf8f..2be5f1a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: 
return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py index b9280bb..849cf0b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_2_5/namespace.py @@ -196,7 +196,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py index 1fefda7..001d53e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py index c2d0ae3..b689f74 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the 
model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py index 0cbb7d1..791d2a1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 5b6e43e..b9d7e8e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py index c448557..265974b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") 
@classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py index c89c787..841bfb6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -78,7 +78,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 023c7f2..a6e07ed 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -80,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py index 57964f8..cb9f5bf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py index bfdee66..1fc5516 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py index 5c7bb29..95c003f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 
18fdbdf..a87f028 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 6c4b38d..1ad221c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py index 115672a..b43007a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_3_0/namespace.py @@ -213,7 +213,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py index 337f9ab..c823f7f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -73,7 +73,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py index f0d616a..4bb3545 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py index acb4357..60fafeb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to 
rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index b122644..607a1fc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 7d21b94..009e0cb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py index 5c49ad0..6f0fcea 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -86,7 +86,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: 
Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py index ba89f45..d8b96b9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py index 3ae1c76..d4f8f0e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -60,7 +60,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py index d7fd27e..d406c06 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -76,7 +76,7 @@ 
class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py index c631b8b..58102f2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 99f01c8..07b0738 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index be229ba..9943610 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py index 4ff416d..5006e35 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_4_0/namespace.py @@ -226,7 +226,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py index 0bca23b..6298cf0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -84,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py index f6aa28a..8aa7fa4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py index 981b650..1dd9b18 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 638cc74..4091b82 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" 
try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py index d4bbff5..512ad37 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py index 46996e4..ef8da1f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 521a9ba..bb6fee9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by 
casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py index 778aff2..cce6df5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py index 5a482d7..9e3fe7c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 2f98119..5d5b7fb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") 
@classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 59d16ba..cd6b4a6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 7f2c8e1..01bb0a3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py index fc69cfa..8704767 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_5_0/namespace.py @@ -227,7 +227,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 9bd14b4..2d699e9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -84,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index ff83b67..90ce123 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py 
b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index f59045a..9ef9121 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 3158ed1..92800fd 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index f58fa82..5951970 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue 
instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index b128e6d..a4b7a33 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index f134579..a73acb9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index bbc8b6f..50d66f2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): 
@field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 48dcc78..e429b8c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 47f364e..797d656 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index ded326f..98880c6 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index d66750d..1982ed6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py index fa08cde..deb4ce2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -229,7 +229,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py index c081f4a..6c8a7fb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -84,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py index 4eda0f3..d22df1e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py index 4690c0f..0e96640 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -58,7 +58,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to 
rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index 434466a..85141cf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py index 24830d7..e5cc476 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py index cb1a3eb..2b52d85 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -87,7 +87,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: 
Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 3234820..7a77474 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py index 49898d6..66edd17 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py index 14615eb..513f113 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -76,7 +76,7 @@ 
class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py index ca48d75..8e6d7c3 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -65,7 +65,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 2156655..5fdf2a0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 880636c..8affbfc 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py index 154f9b6..68e374e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/core/v2_7_0/namespace.py @@ -230,7 +230,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index cce2205..c6e3962 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 6e9755e..4b7bebf 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -81,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py index 6cf0338..37df218 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_0/namespace.py @@ -73,7 +73,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index d2f5b57..3ad3d86 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - 
"""Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 416c28c..df885a6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -81,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py index 4494201..c65b595 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_2/namespace.py @@ -73,7 +73,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index dddcb72..51a407e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index aff7d58..454e1eb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -81,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py index 4ab0cc3..5c85466 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_1_3/namespace.py @@ -73,7 +73,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py index bb0f889..14abdac 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py index 57813a3..3e588f1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -57,7 +57,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 96a4328..e7458c5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's 
value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py index c487cad..f8b7421 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -72,7 +72,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py index 211589e..1900b5a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py index 8c719c9..e76ebb6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class 
ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 52d057e..129bd39 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py index f31f4aa..9c30289 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -72,7 +72,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py index 66f655c..73dfb25 100644 --- 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py index 7f85a2d..3405369 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py index 9d7d2b8..718593e 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return 
handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 9a305bd..1fe61d6 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py index dc608ea..7988815 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -74,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py index bf2e62d..11ae3a5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, 
v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py index 038d9e3..018a821 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 5ab6a67..43b0dbb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py index fa75d95..fb4e7ae 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -66,7 +66,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 5331a77..6226eb1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index 5e8bdf2..7b50f61 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index cce6b51..37d0124 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py index a4c98ce..8d5c266 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py index 842c43c..6610cdc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to 
rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py index a464f6f..b3d639b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 08b3530..e318d77 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py index 6f39f3d..f9021e5 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py index d34d545..86feeaa 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py index 960dbca..23b5fe0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index ce357b4..bccef50 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py index 455af25..a83f1a7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py index 8601653..54ccd77 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to 
rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py index ae6311b..01afd96 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index d94c2b7..2ee12e7 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py index ea23e7a..ac47d34 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index af5d968..2d2b0b9 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -56,7 +56,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 172fa26..1aad5a2 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git 
a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index f47733e..b779c48 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -83,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py index 9676686..64c8e43 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -67,7 +67,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index b494547..0eab9b0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") 
@classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 2c2d524..a4e00af 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 0655c54..5e1eaba 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -75,7 +75,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py index fd0eba4..2acca00 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index 97c423f..fd7d447 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py index 9cb1518..de71e60 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def 
cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py index 641dc7e..425f2b4 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index c57c710..0a72ec0 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py index 3abf7d1..4e09fe1 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -76,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py index ba3e42d..c36225c 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index 42d30a5..e21325f 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", 
mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py index c0ecd43..e82d0dc 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 8ec11c3..6aaf19b 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py 
b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index 6493a27..8acfc1a 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -59,7 +59,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: diff --git a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py index 160f875..46184eb 100644 --- a/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py +++ b/nwb_models/src/nwb_models/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -77,7 +77,7 @@ class ConfiguredBaseModel(BaseModel): @field_validator("*", mode="wrap") @classmethod def cast_with_value(cls, v: Any, handler, info) -> Any: - """Try to rescue instantiation by casting into the model's value fiel""" + """Try to rescue instantiation by casting into the model's value field""" try: return handler(v) except Exception as e1: From 2ce136709b2ecd66d9f6114bd2721fd9c6b10654 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 30 Sep 2024 21:47:02 -0700 Subject: [PATCH 18/18] minor tidying --- nwb_linkml/src/nwb_linkml/io/hdf5.py | 16 ---------------- .../test_adapters/test_adapter_namespaces.py | 5 ----- 2 files changed, 21 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py index d46465f..23bd2fa 100644 --- a/nwb_linkml/src/nwb_linkml/io/hdf5.py +++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py @@ -171,23 +171,7 @@ def _load_node( del 
args[".specloc"] model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"]) - # try: return model(**args) - # except ValidationError as e1: - # # try to restack extra fields into ``value`` - # if "value" in model.model_fields: - # value_dict = { - # key: val for key, val in args.items() if key not in model.model_fields - # } - # for k in value_dict: - # del args[k] - # args["value"] = value_dict - # try: - # return model(**args) - # except Exception as e2: - # raise e2 from e1 - # else: - # raise e1 else: if "name" in args: diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py index 4c8de11..8af60e6 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py @@ -55,11 +55,6 @@ def test_roll_down_inheritance(): """ Classes should receive and override the properties of their parents when they have neurodata_type_inc - Args: - nwb_core_fixture: - - Returns: - """ parent_cls = Group( neurodata_type_def="Parent",