Merge pull request #14 from p2p-ld/roll-down
Some checks failed
Lint / Ruff Linting (push) Has been cancelled
Lint / Black Formatting (push) Has been cancelled
Lint / Check for spelling errors (push) Has been cancelled
Model Rebuild / build_models (push) Has been cancelled
Tests / test (3.10) (push) Has been cancelled
Tests / test (3.11) (push) Has been cancelled
Tests / test (3.12) (push) Has been cancelled
Tests / finish-coverage (push) Has been cancelled

roll down parent inheritance recursively
This commit is contained in:
Jonny Saunders 2024-09-30 21:51:23 -07:00 committed by GitHub
commit ae37db3a41
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
315 changed files with 21901 additions and 5991 deletions

View file

@ -46,6 +46,10 @@ jobs:
run: pytest
working-directory: nwb_linkml
- name: Run nwb_schema_language Tests
run: pytest
working-directory: nwb_schema_language
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2.3.0
if: runner.os != 'macOS'

View file

@ -53,6 +53,9 @@ Loading
- [ ] Top-level containers are still a little janky, e.g. how `ProcessingModule` just accepts
extra args rather than properly abstracting `value` as a `__getitem__(self, key) -> T:`
Changes to linkml
- [ ] Allow parameterizing "extra" fields, so we don't have to stuff things into `value` dicts
## Docs TODOs
```{todolist}

View file

@ -5,7 +5,7 @@
groups = ["default", "dev", "plot", "tests"]
strategy = ["inherit_metadata"]
lock_version = "4.5.0"
content_hash = "sha256:1c297e11f6dc9e4f6b8d29df872177d2ce65bbd334c0b65aa5175dfb125c4d9f"
content_hash = "sha256:14dd3d0b396dc25e554b924825664346d2644f265e48346180f1cfdf833a8c92"
[[metadata.targets]]
requires_python = ">=3.10,<3.13"
@ -1038,9 +1038,9 @@ files = [
[[package]]
name = "numpydantic"
version = "1.3.3"
version = "1.6.0"
requires_python = "<4.0,>=3.9"
summary = "Type and shape validation and serialization for numpy arrays in pydantic models"
summary = "Type and shape validation and serialization for arbitrary array types in pydantic models"
groups = ["default"]
dependencies = [
"numpy>=1.24.0",
@ -1048,13 +1048,13 @@ dependencies = [
"typing-extensions>=4.11.0; python_version < \"3.11\"",
]
files = [
{file = "numpydantic-1.3.3-py3-none-any.whl", hash = "sha256:e002767252b1b77abb7715834ab7cbf58964baddae44863710f09e71b23287e4"},
{file = "numpydantic-1.3.3.tar.gz", hash = "sha256:1cc2744f7b5fbcecd51a64fafaf8c9a564bb296336a566a16be97ba7b1c28698"},
{file = "numpydantic-1.6.0-py3-none-any.whl", hash = "sha256:72f3ef0bc8a5801bac6fb79920467d763d51cddec8476875efeb5064c11c04cf"},
{file = "numpydantic-1.6.0.tar.gz", hash = "sha256:9785ba7eb5489b9e5438109e9b2dcd1cc0aa87d1b6b5df71fb906dc0708df83c"},
]
[[package]]
name = "nwb-models"
version = "0.1.0"
version = "0.2.0"
requires_python = ">=3.10"
summary = "Pydantic/LinkML models for Neurodata Without Borders"
groups = ["default"]
@ -1064,23 +1064,23 @@ dependencies = [
"pydantic>=2.3.0",
]
files = [
{file = "nwb_models-0.1.0-py3-none-any.whl", hash = "sha256:d485422865f6762586e8f8389d67bce17a3e66d07f6273385a751145afbbbfea"},
{file = "nwb_models-0.1.0.tar.gz", hash = "sha256:3c3ccfc6c2ac03dffe26ba7f180aecc650d6593c05d4f306f84b90fabc3ff2b8"},
{file = "nwb_models-0.2.0-py3-none-any.whl", hash = "sha256:72bb8a8879261488071d4e8eff35f2cbb20c44ac4bb7f67806c6329b4f8b2068"},
{file = "nwb_models-0.2.0.tar.gz", hash = "sha256:7e7f280378c668e1695dd9d53b32073d85615e90fee0ec417888dd83bdb9cbb3"},
]
[[package]]
name = "nwb-schema-language"
version = "0.1.3"
requires_python = ">=3.9,<4.0"
version = "0.2.0"
requires_python = "<3.13,>=3.10"
summary = "Translation of the nwb-schema-language to LinkML"
groups = ["default"]
dependencies = [
"linkml-runtime<2.0.0,>=1.1.24",
"pydantic<3.0.0,>=2.3.0",
"linkml-runtime>=1.7.7",
"pydantic>=2.3.0",
]
files = [
{file = "nwb_schema_language-0.1.3-py3-none-any.whl", hash = "sha256:2eb86aac6614d490f7ec3fa68634bb9dceb3834d9820f5afc5645a9f3b0c3401"},
{file = "nwb_schema_language-0.1.3.tar.gz", hash = "sha256:ad290e2896a9cde7e2f353bc3b8ddf42be865238d991167d397ff2e0d03c88ba"},
{file = "nwb_schema_language-0.2.0-py3-none-any.whl", hash = "sha256:354afb0abfbc61a6d6b227695b9a4312df5030f2746b517fc5849ac085c8e5f2"},
{file = "nwb_schema_language-0.2.0.tar.gz", hash = "sha256:59beda56ea52a55f4514d7e4b73e30ceaee1c60b7ddf4fc80afd48777acf9e50"},
]
[[package]]

View file

@ -12,7 +12,7 @@ dependencies = [
"nwb-models>=0.2.0",
"pyyaml>=6.0",
"linkml-runtime>=1.7.7",
"nwb-schema-language>=0.1.3",
"nwb-schema-language>=0.2.0",
"rich>=13.5.2",
#"linkml>=1.7.10",
"linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml",
@ -22,7 +22,7 @@ dependencies = [
"pydantic-settings>=2.0.3",
"tqdm>=4.66.1",
'typing-extensions>=4.12.2;python_version<"3.11"',
"numpydantic>=1.5.0",
"numpydantic>=1.6.0",
"black>=24.4.2",
"pandas>=2.2.2",
"networkx>=3.3",

View file

@ -17,9 +17,10 @@ from linkml_runtime.linkml_model import (
SlotDefinition,
TypeDefinition,
)
from pydantic import BaseModel
from pydantic import BaseModel, PrivateAttr
from nwb_linkml.logging import init_logger
from nwb_linkml.maps.dtype import float_types, integer_types, string_types
from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema
if sys.version_info.minor >= 11:
@ -103,6 +104,7 @@ class Adapter(BaseModel):
_logger: Optional[Logger] = None
_debug: Optional[bool] = None
_nwb_classes: dict[str, Dataset | Group] = PrivateAttr(default_factory=dict)
@property
def debug(self) -> bool:
@ -135,7 +137,10 @@ class Adapter(BaseModel):
Convenience wrapper around :meth:`.walk_field_values`
"""
return next(self.walk_field_values(self, "neurodata_type_def", name))
if name not in self._nwb_classes:
cls = next(self.walk_field_values(self, "neurodata_type_def", name))
self._nwb_classes[name] = cls
return self._nwb_classes[name]
def get_model_with_field(self, field: str) -> Generator[Union[Group, Dataset], None, None]:
"""
@ -170,6 +175,10 @@ class Adapter(BaseModel):
# so skip to avoid combinatoric walking
if key == "imports" and type(input).__name__ == "SchemaAdapter":
continue
# nwb_schema_language objects have a reference to their parent,
# which causes cycles
if key == "parent":
continue
val = getattr(input, key)
yield (key, val)
if isinstance(val, (BaseModel, dict, list)):
@ -300,5 +309,85 @@ def has_attrs(cls: Dataset) -> bool:
return (
cls.attributes is not None
and len(cls.attributes) > 0
and all([not a.value for a in cls.attributes])
and any([not a.value for a in cls.attributes])
)
def defaults(cls: Dataset | Attribute) -> dict:
    """
    Build linkml slot default metaslots from an nwb schema language object.

    * If ``value`` is present, yield ``equals_string`` or ``equals_number``
      depending on dtype **as well as** an ``ifabsent`` value - we both constrain
      the possible values to 1 and also supply it as the default
    * else, if ``default_value`` is present, yield an appropriate ``ifabsent`` value
    * If neither, yield an empty dict

    Unlike nwb_schema_language, when ``value`` is set, we yield both an ``equals_*``
    constraint and an ``ifabsent`` constraint, because an ``equals_*`` can be declared
    without a default in order to validate that a value is correctly set as the
    constrained value, and fail if a value isn't provided.

    Args:
        cls (:class:`.Dataset` | :class:`.Attribute`): object whose ``value`` /
            ``default_value`` / ``dtype`` fields are inspected

    Returns:
        dict: kwargs suitable for splatting into a ``SlotDefinition``
    """
    ret = {}
    # compare against None rather than truthiness: a fixed value of 0, 0.0,
    # False, or "" is a real constraint and must not be silently dropped
    if cls.value is not None:
        if cls.dtype in integer_types:
            ret["equals_number"] = cls.value
            ret["ifabsent"] = f"integer({cls.value})"
        elif cls.dtype in float_types:
            ret["equals_number"] = cls.value
            ret["ifabsent"] = f"float({cls.value})"
        elif cls.dtype in string_types:
            ret["equals_string"] = cls.value
            ret["ifabsent"] = f"string({cls.value})"
        else:
            # unknown dtype: constrain as a string and pass the raw value through
            ret["equals_string"] = cls.value
            ret["ifabsent"] = cls.value
    elif cls.default_value is not None:
        # NOTE(review): integer defaults use ``int(...)`` here but ``integer(...)``
        # in the ``value`` branch above - confirm which spelling the linkml
        # ifabsent processor expects and unify
        if cls.dtype in string_types:
            ret["ifabsent"] = f"string({cls.default_value})"
        elif cls.dtype in integer_types:
            ret["ifabsent"] = f"int({cls.default_value})"
        elif cls.dtype in float_types:
            ret["ifabsent"] = f"float({cls.default_value})"
        else:
            ret["ifabsent"] = cls.default_value
    return ret
def is_container(group: "Group") -> bool:
    """
    Check if a group is a container group,
    i.e. a group that...

    * has no name
    * has multivalued (``*``) quantity
    * has a ``neurodata_type_inc``
    * has no ``neurodata_type_def``
    * has no sub-groups
    * has no datasets
    * has no attributes

    Examples:

    .. code-block:: yaml

        - name: templates
          groups:
          - neurodata_type_inc: TimeSeries
            doc: TimeSeries objects containing template data of presented stimuli.
            quantity: '*'
          - neurodata_type_inc: Images
            doc: Images objects containing images of presented stimuli.
            quantity: '*'
    """
    # bool() ensures we always return an actual boolean: a bare ``and`` chain
    # would leak ``None`` when e.g. ``neurodata_type_inc`` is unset
    return bool(
        not group.name
        and group.quantity == "*"
        and group.neurodata_type_inc
        and not group.neurodata_type_def
        and not group.datasets
        and not group.groups
        and not group.attributes
    )

View file

@ -7,26 +7,13 @@ from typing import ClassVar, Optional, Type, TypedDict
from linkml_runtime.linkml_model.meta import SlotDefinition
from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d
from nwb_linkml.adapters.adapter import Adapter, BuildResult, defaults, is_1d
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.maps import Map
from nwb_linkml.maps.dtype import handle_dtype, inlined
from nwb_schema_language import Attribute
def _make_ifabsent(val: str | int | float | None) -> str | None:
if val is None:
return None
elif isinstance(val, str):
return f"string({val})"
elif isinstance(val, int):
return f"integer({val})"
elif isinstance(val, float):
return f"float({val})"
else:
return str(val)
class AttrDefaults(TypedDict):
"""Default fields for an attribute"""
@ -38,31 +25,6 @@ class AttrDefaults(TypedDict):
class AttributeMap(Map):
"""Base class for attribute mapping transformations :)"""
@classmethod
def handle_defaults(cls, attr: Attribute) -> AttrDefaults:
"""
Construct arguments for linkml slot default metaslots from nwb schema lang attribute props
"""
equals_string = None
equals_number = None
default_value = None
if attr.value:
if isinstance(attr.value, (int, float)):
equals_number = attr.value
elif attr.value:
equals_string = str(attr.value)
if equals_number:
default_value = _make_ifabsent(equals_number)
elif equals_string:
default_value = _make_ifabsent(equals_string)
elif attr.default_value:
default_value = _make_ifabsent(attr.default_value)
return AttrDefaults(
equals_string=equals_string, equals_number=equals_number, ifabsent=default_value
)
@classmethod
@abstractmethod
def check(cls, attr: Attribute) -> bool:
@ -105,7 +67,7 @@ class MapScalar(AttributeMap):
description=attr.doc,
required=attr.required,
inlined=inlined(attr.dtype),
**cls.handle_defaults(attr),
**defaults(attr),
)
return BuildResult(slots=[slot])
@ -154,7 +116,7 @@ class MapArray(AttributeMap):
required=attr.required,
inlined=inlined(attr.dtype),
**expressions,
**cls.handle_defaults(attr),
**defaults(attr),
)
return BuildResult(slots=[slot])

View file

@ -7,7 +7,7 @@ from typing import ClassVar, Optional, Type
from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition
from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.adapter import BuildResult, defaults, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
@ -59,9 +59,7 @@ class MapScalar(DatasetMap):
slots:
- name: MyScalar
description: A scalar
multivalued: false
range: int32
required: false
"""
@ -108,6 +106,7 @@ class MapScalar(DatasetMap):
description=cls.doc,
range=handle_dtype(cls.dtype),
**QUANTITY_MAP[cls.quantity],
**defaults(cls),
)
res = BuildResult(slots=[this_slot])
return res
@ -208,7 +207,19 @@ class MapScalarAttributes(DatasetMap):
"""
Map to a scalar attribute with an adjoining "value" slot
"""
value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True)
# the *value slot* within the generated class is always required,
# but the slot in the parent class referring to this one will indicate whether the whole
# thing is optional or not. You can't provide the attributes of the optional dataset
# without providing its value
quantity = QUANTITY_MAP[cls.quantity].copy()
quantity["required"] = True
value_slot = SlotDefinition(
name="value",
range=handle_dtype(cls.dtype),
**quantity,
**defaults(cls),
)
res.classes[0].attributes["value"] = value_slot
return res
@ -616,7 +627,8 @@ class MapNVectors(DatasetMap):
DynamicTable (and the slot VectorData where this is called for)
is handled specially and just dropped, because we handle the possibility for
arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes.
arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes
(see :class:`.MapNVectorData` ).
So really this is just a handler for the `Images` case
"""
@ -652,6 +664,40 @@ class MapNVectors(DatasetMap):
return res
class MapNVectorData(DatasetMap):
    """
    An extremely special case just for DynamicTable:
    DynamicTable indicates that all of its extra columns are ``VectorData`` with an
    unnamed, * quantity dataset similar to the case of :class:`.MapNVectors` .

    We handle this with the :mod:`.includes.hdmf` module mixin classes instead,
    and so to avoid generating a pointless slot and class,
    we just catch that case and return nothing.
    """

    @classmethod
    def check(c, cls: Dataset) -> bool:
        """
        Check for being an unnamed multivalued vector class that IS VectorData
        """
        # bool() guarantees a real boolean instead of a leaked falsy field value;
        # the ``== "VectorData"`` comparison already covers ``neurodata_type_inc``
        # being None, so no separate truthiness check is needed
        return bool(
            cls.name is None
            and cls.neurodata_type_def is None
            and cls.neurodata_type_inc == "VectorData"
            and cls.quantity in ("*", "+")
        )

    @classmethod
    def apply(
        c, cls: Dataset, res: Optional[BuildResult] = None, name: Optional[str] = None
    ) -> BuildResult:
        """
        Return ... nothing
        """
        return BuildResult()
class MapCompoundDtype(DatasetMap):
"""
A ``dtype`` declared as an array of types that function effectively as a row in a table.

View file

@ -2,11 +2,11 @@
Adapter for NWB groups to linkml Classes
"""
from typing import List, Type
from typing import Type
from linkml_runtime.linkml_model import SlotDefinition
from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.adapters.adapter import BuildResult, is_container
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.maps import QUANTITY_MAP
@ -29,7 +29,7 @@ class GroupAdapter(ClassAdapter):
"""
# Handle container groups with only * quantity unnamed groups
if (
len(self.cls.groups) > 0
self.cls.groups
and not self.cls.links
and all([self._check_if_container(g) for g in self.cls.groups])
): # and \
@ -38,26 +38,28 @@ class GroupAdapter(ClassAdapter):
# handle if we are a terminal container group without making a new class
if (
len(self.cls.groups) == 0
and len(self.cls.datasets) == 0
not self.cls.groups
and not self.cls.datasets
and self.cls.neurodata_type_inc is not None
and self.parent is not None
):
return self.handle_container_slot(self.cls)
nested_res = self.build_subclasses()
# add links
links = self.build_links()
nested_res = self.build_datasets()
nested_res += self.build_groups()
nested_res += self.build_links()
nested_res += self.build_containers()
nested_res += self.build_special_cases()
# we don't propagate slots up to the next level since they are meant for this
# level (ie. a way to refer to our children)
res = self.build_base(extra_attrs=nested_res.slots + links)
res = self.build_base(extra_attrs=nested_res.slots)
# we do propagate classes tho
res.classes.extend(nested_res.classes)
return res
def build_links(self) -> List[SlotDefinition]:
def build_links(self) -> BuildResult:
"""
Build links specified in the ``links`` field as slots that refer to other
classes, with an additional annotation specifying that they are in fact links.
@ -66,7 +68,7 @@ class GroupAdapter(ClassAdapter):
file hierarchy as a string.
"""
if not self.cls.links:
return []
return BuildResult()
annotations = [{"tag": "source_type", "value": "link"}]
@ -83,7 +85,7 @@ class GroupAdapter(ClassAdapter):
)
for link in self.cls.links
]
return slots
return BuildResult(slots=slots)
def handle_container_group(self, cls: Group) -> BuildResult:
"""
@ -129,7 +131,7 @@ class GroupAdapter(ClassAdapter):
# We are a top-level container class like ProcessingModule
base = self.build_base()
# remove all the attributes and replace with child slot
base.classes[0].attributes = [slot]
base.classes[0].attributes.update({slot.name: slot})
return base
def handle_container_slot(self, cls: Group) -> BuildResult:
@ -167,28 +169,88 @@ class GroupAdapter(ClassAdapter):
return BuildResult(slots=[slot])
def build_subclasses(self) -> BuildResult:
def build_datasets(self) -> BuildResult:
"""
Build nested groups and datasets
Create ClassDefinitions for each, but then also create SlotDefinitions that
will be used as attributes linking the main class to the subclasses
Datasets are simple, they are terminal classes, and all logic
for creating slots vs. classes is handled by the adapter class
"""
# Datasets are simple, they are terminal classes, and all logic
# for creating slots vs. classes is handled by the adapter class
dataset_res = BuildResult()
for dset in self.cls.datasets:
dset_adapter = DatasetAdapter(cls=dset, parent=self)
dataset_res += dset_adapter.build()
if self.cls.datasets:
for dset in self.cls.datasets:
dset_adapter = DatasetAdapter(cls=dset, parent=self)
dataset_res += dset_adapter.build()
return dataset_res
def build_groups(self) -> BuildResult:
"""
Build subgroups, excluding pure container subgroups
"""
group_res = BuildResult()
for group in self.cls.groups:
group_adapter = GroupAdapter(cls=group, parent=self)
group_res += group_adapter.build()
if self.cls.groups:
for group in self.cls.groups:
if is_container(group):
continue
group_adapter = GroupAdapter(cls=group, parent=self)
group_res += group_adapter.build()
res = dataset_res + group_res
return group_res
def build_containers(self) -> BuildResult:
    """
    Build all container subgroups (see :func:`.is_container`) into a single
    ``value`` slot.

    Returns:
        :class:`.BuildResult` with one ``value`` slot, or an empty result if
        this group has no container subgroups
    """
    res = BuildResult()
    if not self.cls.groups:
        return res
    containers = [grp for grp in self.cls.groups if is_container(grp)]
    if not containers:
        return res

    # renamed from ``range`` to avoid shadowing the builtin
    if len(containers) == 1:
        range_kwargs = {"range": containers[0].neurodata_type_inc}
        description = containers[0].doc
    else:
        range_kwargs = {
            "any_of": [{"range": subcls.neurodata_type_inc} for subcls in containers]
        }
        description = "\n\n".join([grp.doc for grp in containers])

    slot = SlotDefinition(
        name="value",
        multivalued=True,
        inlined=True,
        inlined_as_list=False,
        description=description,
        **range_kwargs,
    )
    if self.debug:  # pragma: no cover - only used in development
        slot.annotations["group_adapter"] = {
            "tag": "slot_adapter",
            "value": "container_value_slot",
        }
    res.slots = [slot]
    return res
def build_special_cases(self) -> BuildResult:
    """
    Special cases, at this point just for NWBFile, which has
    extra ``.specloc`` and ``specifications`` attrs
    """
    res = BuildResult()
    # guard clause: everything except NWBFile passes through untouched
    if self.cls.neurodata_type_def != "NWBFile":
        return res
    specifications_slot = SlotDefinition(
        name="specifications",
        range="dict",
        description="Nested dictionary of schema specifications",
    )
    res.slots = [specifications_slot]
    return res
def build_self_slot(self) -> SlotDefinition:

View file

@ -8,8 +8,7 @@ for extracting information and generating translated schema
import contextlib
from copy import copy
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
from typing import Dict, Generator, List, Optional
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import Annotation, SchemaDefinition
@ -19,7 +18,7 @@ from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.lang_elements import NwbLangSchema
from nwb_linkml.ui import AdapterProgress
from nwb_schema_language import Namespaces
from nwb_schema_language import Dataset, Group, Namespaces
class NamespacesAdapter(Adapter):
@ -31,6 +30,9 @@ class NamespacesAdapter(Adapter):
schemas: List[SchemaAdapter]
imported: List["NamespacesAdapter"] = Field(default_factory=list)
_completed: bool = False
"""whether we have run the :meth:`.complete_namespace` method"""
@classmethod
def from_yaml(cls, path: Path) -> "NamespacesAdapter":
"""
@ -65,7 +67,7 @@ class NamespacesAdapter(Adapter):
needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns)
ns_adapter.imported.append(needed_adapter)
ns_adapter.populate_imports()
ns_adapter.complete_namespaces()
return ns_adapter
@ -76,6 +78,9 @@ class NamespacesAdapter(Adapter):
Build the NWB namespace to the LinkML Schema
"""
if not self._completed:
self.complete_namespaces()
sch_result = BuildResult()
for sch in self.schemas:
if progress is not None:
@ -149,45 +154,143 @@ class NamespacesAdapter(Adapter):
break
return self
def find_type_source(self, name: str) -> SchemaAdapter:
def complete_namespaces(self) -> None:
"""
Given some neurodata_type_inc, find the schema that it's defined in.
After loading the namespace, and after any imports have been added afterwards,
this must be called to complete the definitions of the contained schema objects.
Rather than returning as soon as a match is found, check all
This is not automatic because NWB doesn't have a formal dependency resolution system,
so it is often impossible to know which imports are needed until after the namespace
adapter has been instantiated.
It **is** automatically called if it hasn't been already by the :meth:`.build` method.
"""
# First check within the main schema
internal_matches = []
for schema in self.schemas:
class_names = [cls.neurodata_type_def for cls in schema.created_classes]
if name in class_names:
internal_matches.append(schema)
self._populate_imports()
self._roll_down_inheritance()
if len(internal_matches) > 1:
raise KeyError(
f"Found multiple schemas in namespace that define {name}:\ninternal:"
f" {pformat(internal_matches)}\nimported:{pformat(internal_matches)}"
)
elif len(internal_matches) == 1:
return internal_matches[0]
for i in self.imported:
i.complete_namespaces()
import_matches = []
for imported_ns in self.imported:
for schema in imported_ns.schemas:
class_names = [cls.neurodata_type_def for cls in schema.created_classes]
if name in class_names:
import_matches.append(schema)
self._completed = True
if len(import_matches) > 1:
raise KeyError(
f"Found multiple schemas in namespace that define {name}:\ninternal:"
f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
)
elif len(import_matches) == 1:
return import_matches[0]
def _roll_down_inheritance(self) -> None:
    """
    nwb-schema-language inheritance doesn't work like normal python inheritance -
    instead of inheriting everything at the 'top level' of a class, it also
    recursively merges all properties from the parent objects.

    While this operation does not take care to modify classes in a way that respect their order
    (i.e. roll down ancestor classes first, in order, before the leaf classes),
    it doesn't matter - this method should be both idempotent and order insensitive
    for a given source schema.

    References:
        https://github.com/NeurodataWithoutBorders/pynwb/issues/1954
    """
    # only classes that inherit (via neurodata_type_inc) need rolling down
    for cls in self.walk_types(self, (Group, Dataset)):
        if not cls.neurodata_type_inc:
            continue

        parents = self._get_class_ancestors(cls, include_child=True)

        # merge and cast: fold the ancestry chain into a single dict,
        # oldest ancestor first, then re-instantiate as the original type
        new_cls: dict = {}
        for i, parent in enumerate(parents):
            # we want a full roll-down of all the ancestor classes,
            # but we make an abbreviated leaf class
            # (complete=False only on the last element, i.e. ``cls`` itself)
            complete = i != len(parents) - 1
            new_cls = roll_down_nwb_class(new_cls, parent, complete=complete)
        new_cls: Group | Dataset = type(cls)(**new_cls)
        # preserve the original parent link so the merged class sits in the
        # same place in the schema tree
        new_cls.parent = cls.parent

        # reinsert
        self._overwrite_class(new_cls, cls)
def _get_class_ancestors(
    self, cls: Dataset | Group, include_child: bool = True
) -> list[Dataset | Group]:
    """
    Get the chain of ancestor classes inherited via ``neurodata_type_inc``

    Args:
        cls (:class:`.Dataset` | :class:`.Group`): The class to get ancestors of
        include_child (bool): If ``True`` (default), include ``cls`` in the output list
    """
    # walk up the inheritance chain, prepending so the oldest ancestor ends up first
    ancestor = self.get(cls.neurodata_type_inc)
    chain = [ancestor]
    while ancestor.neurodata_type_inc:
        ancestor = self.get(ancestor.neurodata_type_inc)
        chain.insert(0, ancestor)
    if include_child:
        chain.append(cls)
    return chain
def _overwrite_class(self, new_cls: Dataset | Group, old_cls: Dataset | Group) -> None:
"""
Overwrite the version of a dataset or group that is stored in our schemas
"""
if old_cls.parent:
if isinstance(old_cls, Dataset):
new_cls.parent.datasets[new_cls.parent.datasets.index(old_cls)] = new_cls
else:
new_cls.parent.groups[new_cls.parent.groups.index(old_cls)] = new_cls
else:
raise KeyError(f"No schema found that define {name}")
# top level class, need to go and find it
schema = self.find_type_source(old_cls)
if isinstance(new_cls, Dataset):
schema.datasets[schema.datasets.index(old_cls)] = new_cls
else:
schema.groups[schema.groups.index(old_cls)] = new_cls
def populate_imports(self) -> "NamespacesAdapter":
def find_type_source(self, cls: str | Dataset | Group, fast: bool = False) -> SchemaAdapter:
    """
    Given some type (as ``neurodata_type_def``), find the schema that it's defined in.

    Rather than returning as soon as a match is found, ensure that duplicates are
    not found within the primary schema, then do the same for all imported schemas.

    Args:
        cls (str | :class:`.Dataset` | :class:`.Group`): The ``neurodata_type_def``
            to look for the source of. If a Dataset or Group, look for the object itself
            (cls in schema.datasets), otherwise look for a class with a matching name.
        fast (bool): If ``True``, return as soon as a match is found.
            If ``False``, return after checking all schemas for duplicates.

    Returns:
        :class:`.SchemaAdapter`

    Raises:
        KeyError: if multiple schemas or no schemas are found
    """
    matches = []
    for schema in self.all_schemas():
        # dispatch on query type: strings match by declared class name,
        # Dataset/Group instances match by membership in the schema's objects
        if isinstance(cls, str):
            found = cls in [c.neurodata_type_def for c in schema.created_classes]
        elif isinstance(cls, Dataset):
            found = cls in schema.datasets
        elif isinstance(cls, Group):
            found = cls in schema.groups
        else:
            found = False

        if not found:
            continue
        if fast:
            return schema
        matches.append(schema)

    if len(matches) > 1:
        raise KeyError(f"Found multiple schemas in namespace that define {cls}:\n{matches}")
    elif len(matches) == 1:
        return matches[0]
    else:
        raise KeyError(f"No schema found that define {cls}")
def _populate_imports(self) -> "NamespacesAdapter":
"""
Populate the imports that are needed for each schema file
@ -279,3 +382,109 @@ class NamespacesAdapter(Adapter):
if name in sources:
return ns.name
return None
def all_schemas(self) -> Generator[SchemaAdapter, None, None]:
    """
    Iterator over all schemas including imports
    """
    # own schemas first, then each imported namespace's schemas in order
    yield from self.schemas
    for imported_ns in self.imported:
        yield from imported_ns.schemas
def roll_down_nwb_class(
    source: Group | Dataset | dict, target: Group | Dataset | dict, complete: bool = False
) -> dict:
    """
    Merge an ancestor (via ``neurodata_type_inc`` ) source class with a
    child ``target`` class.

    On the first recursive pass, only those values that are set on the target are copied from the
    source class - this isn't a true merging, what we are after is to recursively merge all the
    values that are modified in the child class with those of the parent class below the top level,
    the top-level attributes will be carried through via normal inheritance.

    Rather than re-instantiating the child class, we return the dictionary so that this
    function can be used in series to merge a whole ancestry chain within
    :class:`.NamespacesAdapter` , but merging isn't exposed in the function since
    ancestor class definitions can be spread out over many schemas,
    and we need the orchestration of the adapter to have them in all cases we'd be using this.

    Args:
        source (:class:`.Group` | :class:`.Dataset` | dict): source class (or its dump)
        target (:class:`.Group` | :class:`.Dataset` | dict): target class
            (values merged over source)
        complete (bool): (default ``False``) do a complete merge, merging everything
            from source to target without trying to minimize redundancy.
            Used to collapse ancestor classes before the terminal class.

    References:
        https://github.com/NeurodataWithoutBorders/pynwb/issues/1954
    """
    # normalize both inputs to plain dicts so we can merge key-by-key
    if isinstance(source, (Group, Dataset)):
        source = source.model_dump(exclude_none=True)
    if isinstance(target, (Group, Dataset)):
        target = target.model_dump(exclude_none=True)

    # never copy the parent's type name into the child
    exclude = ("neurodata_type_def",)

    # if we are on the first recursion, we exclude top-level items that are not set in the target
    if complete:
        ret = {k: v for k, v in source.items() if k not in exclude}
    else:
        ret = {k: v for k, v in source.items() if k not in exclude and k in target}

    for key, value in target.items():
        if key not in ret:
            ret[key] = value
        elif isinstance(value, dict):
            # NOTE(review): ``key in ret`` is always True here (the
            # ``key not in ret`` case was handled above), so the else branch
            # appears unreachable - kept for safety
            if key in ret:
                ret[key] = roll_down_nwb_class(ret[key], value, complete=True)
            else:
                ret[key] = value
        elif isinstance(value, list) and all([isinstance(v, dict) for v in value]):
            # lists of dicts (e.g. attributes, datasets, groups) are matched by
            # their "name" key and merged element-wise.
            # NOTE(review): ``list.index`` finds the first *equal* dict, so
            # duplicate entries would alias - presumably names are unique
            # within these lists; confirm against the schema language spec
            src_keys = {v["name"]: ret[key].index(v) for v in ret.get(key, {}) if "name" in v}
            target_keys = {v["name"]: value.index(v) for v in value if "name" in v}

            new_val = []
            # screwy double iteration to preserve dict order
            # all dicts not in target, if in depth > 0
            if complete:
                new_val.extend(
                    [
                        ret[key][src_keys[k]]
                        for k in src_keys
                        if k in set(src_keys.keys()) - set(target_keys.keys())
                    ]
                )
            # all dicts not in source
            new_val.extend(
                [
                    value[target_keys[k]]
                    for k in target_keys
                    if k in set(target_keys.keys()) - set(src_keys.keys())
                ]
            )
            # merge dicts in both
            new_val.extend(
                [
                    roll_down_nwb_class(ret[key][src_keys[k]], value[target_keys[k]], complete=True)
                    for k in target_keys
                    if k in set(src_keys.keys()).intersection(set(target_keys.keys()))
                ]
            )
            new_val = sorted(new_val, key=lambda i: i["name"])
            # add any dicts that don't have the list_key
            # they can't be merged since they can't be matched
            if complete:
                new_val.extend([v for v in ret.get(key, {}) if "name" not in v])
                new_val.extend([v for v in value if "name" not in v])

            ret[key] = new_val
        else:
            # scalar (or non-dict-list) values: target simply wins
            ret[key] = value

    return ret

View file

@ -9,13 +9,13 @@ import re
from dataclasses import dataclass, field
from pathlib import Path
from types import ModuleType
from typing import Callable, ClassVar, Dict, List, Literal, Optional, Tuple
from typing import Callable, ClassVar, Dict, List, Optional, Tuple
from linkml.generators import PydanticGenerator
from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray
from linkml.generators.pydanticgen.build import ClassResult, SlotResult
from linkml.generators.pydanticgen.pydanticgen import SplitMode
from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule
from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport, PydanticModule
from linkml_runtime.linkml_model.meta import (
ArrayExpression,
SchemaDefinition,
@ -26,8 +26,10 @@ from linkml_runtime.utils.formatutils import remove_empty_items
from linkml_runtime.utils.schemaview import SchemaView
from nwb_linkml.includes.base import (
BASEMODEL_CAST_WITH_VALUE,
BASEMODEL_COERCE_CHILD,
BASEMODEL_COERCE_VALUE,
BASEMODEL_EXTRA_TO_VALUE,
BASEMODEL_GETITEM,
)
from nwb_linkml.includes.hdmf import (
@ -55,10 +57,17 @@ class NWBPydanticGenerator(PydanticGenerator):
'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
BASEMODEL_GETITEM,
BASEMODEL_COERCE_VALUE,
BASEMODEL_CAST_WITH_VALUE,
BASEMODEL_COERCE_CHILD,
BASEMODEL_EXTRA_TO_VALUE,
)
split: bool = True
imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")])
imports: list[Import] = field(
default_factory=lambda: [
Import(module="numpy", alias="np"),
Import(module="pydantic", objects=[ObjectImport(name="model_validator")]),
]
)
schema_map: Optional[Dict[str, SchemaDefinition]] = None
"""See :meth:`.LinkMLProvider.build` for usage - a list of specific versions to import from"""
@ -70,7 +79,7 @@ class NWBPydanticGenerator(PydanticGenerator):
emit_metadata: bool = True
gen_classvars: bool = True
gen_slots: bool = True
extra_fields: Literal["allow", "forbid", "ignore"] = "allow"
# extra_fields: Literal["allow", "forbid", "ignore"] = "allow"
skip_meta: ClassVar[Tuple[str]] = ("domain_of", "alias")
@ -136,7 +145,7 @@ class NWBPydanticGenerator(PydanticGenerator):
"""Customize dynamictable behavior"""
cls = AfterGenerateClass.inject_dynamictable(cls)
cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv)
cls = AfterGenerateClass.inject_elementidentifiers(cls, sv, self._get_element_import)
cls = AfterGenerateClass.inject_dynamictable_imports(cls, sv, self._get_element_import)
cls = AfterGenerateClass.strip_vector_data_slots(cls, sv)
return cls
@ -267,7 +276,7 @@ class AfterGenerateClass:
"""
if cls.cls.name == "DynamicTable":
cls.cls.bases = ["DynamicTableMixin", "ConfiguredBaseModel"]
cls.cls.bases = ["DynamicTableMixin"]
if (
cls.injected_classes is None
@ -285,18 +294,18 @@ class AfterGenerateClass:
else: # pragma: no cover - for completeness, shouldn't happen
cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy()
elif cls.cls.name == "VectorData":
cls.cls.bases = ["VectorDataMixin", "ConfiguredBaseModel"]
cls.cls.bases = ["VectorDataMixin"]
# make ``value`` generic on T
if "value" in cls.cls.attributes:
cls.cls.attributes["value"].range = "Optional[T]"
elif cls.cls.name == "VectorIndex":
cls.cls.bases = ["VectorIndexMixin", "ConfiguredBaseModel"]
cls.cls.bases = ["VectorIndexMixin"]
elif cls.cls.name == "DynamicTableRegion":
cls.cls.bases = ["DynamicTableRegionMixin", "VectorData", "ConfiguredBaseModel"]
cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
elif cls.cls.name == "AlignedDynamicTable":
cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]
elif cls.cls.name == "ElementIdentifiers":
cls.cls.bases = ["ElementIdentifiersMixin", "Data", "ConfiguredBaseModel"]
cls.cls.bases = ["ElementIdentifiersMixin", "Data"]
# make ``value`` generic on T
if "value" in cls.cls.attributes:
cls.cls.attributes["value"].range = "Optional[T]"
@ -346,19 +355,22 @@ class AfterGenerateClass:
return cls
@staticmethod
def inject_elementidentifiers(
def inject_dynamictable_imports(
cls: ClassResult, sv: SchemaView, import_method: Callable[[str], Import]
) -> ClassResult:
"""
Inject ElementIdentifiers into module that define dynamictables -
needed to handle ID columns
Ensure that schema that contain dynamictables have all the imports needed to use them
"""
if (
cls.source.is_a == "DynamicTable"
or "DynamicTable" in sv.class_ancestors(cls.source.name)
) and sv.schema.name != "hdmf-common.table":
imp = import_method("ElementIdentifiers")
cls.imports += [imp]
imp = [
import_method("ElementIdentifiers"),
import_method("VectorData"),
import_method("VectorIndex"),
]
cls.imports += imp
return cls
@staticmethod

View file

@ -3,7 +3,7 @@ Modifications to the ConfiguredBaseModel used by all generated classes
"""
BASEMODEL_GETITEM = """
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
\"\"\"Try and get a value from value or "data" if we have it\"\"\"
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -16,7 +16,7 @@ BASEMODEL_GETITEM = """
BASEMODEL_COERCE_VALUE = """
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
\"\"\"Try to rescue instantiation by using the value field\"\"\"
try:
return handler(v)
@ -30,6 +30,20 @@ BASEMODEL_COERCE_VALUE = """
raise e1
"""
BASEMODEL_CAST_WITH_VALUE = """
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
\"\"\"Try to rescue instantiation by casting into the model's value field\"\"\"
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
"""
BASEMODEL_COERCE_CHILD = """
@field_validator("*", mode="before")
@classmethod
@ -41,9 +55,36 @@ BASEMODEL_COERCE_CHILD = """
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
"""
# Injected into every generated ``ConfiguredBaseModel``: when a model forbids
# extra fields but declares a ``value`` slot, undeclared kwargs are gathered
# into ``value`` instead of failing validation outright.
# Fix over previous revision: if ``value`` was passed explicitly as a non-dict
# alongside extra kwargs, ``v["value"].update(extras)`` raised a raw
# AttributeError (before-validators only get ValueError/AssertionError wrapped
# by pydantic) — now raises ValueError so it surfaces as a ValidationError.
BASEMODEL_EXTRA_TO_VALUE = """
    @model_validator(mode="before")
    @classmethod
    def gather_extra_to_value(cls, v: Any) -> Any:
        \"\"\"
        For classes that don't allow extra fields and have a value slot,
        pack those extra kwargs into ``value``.

        Raises:
            ValueError: if extra kwargs are present but ``value`` was already
                passed as a non-dict, since there is nothing to merge them into.
        \"\"\"
        if (
            cls.model_config["extra"] == "forbid"
            and "value" in cls.model_fields
            and isinstance(v, dict)
        ):
            extras = {key: val for key, val in v.items() if key not in cls.model_fields}
            if extras:
                for k in extras:
                    del v[k]
                if "value" not in v:
                    v["value"] = extras
                elif isinstance(v["value"], dict):
                    v["value"].update(extras)
                else:
                    raise ValueError(
                        f"Cannot merge extra fields {list(extras)} into an explicitly "
                        f"passed non-dict 'value' of type {type(v['value'])}"
                    )
        return v
"""

View file

@ -39,8 +39,30 @@ if TYPE_CHECKING: # pragma: no cover
T = TypeVar("T", bound=NDArray)
T_INJECT = 'T = TypeVar("T", bound=NDArray)'
if "pytest" in sys.modules:
from nwb_models.models import ConfiguredBaseModel
else:
class DynamicTableMixin(BaseModel):
class ConfiguredBaseModel(BaseModel):
    """
    Dummy ConfiguredBaseModel (without its methods from :mod:`.includes.base` )
    used so that the injected mixins inherit from the `ConfiguredBaseModel`
    and we get a linear inheritance MRO (rather than needing to inherit
    from the mixins *and* the configured base model) so that the
    model_config is correctly resolved (ie. to allow extra args)
    """

    # NOTE(review): this config mirrors the ConfigDict emitted in the generated
    # nwb_models ConfiguredBaseModel (including extra="forbid") — keep the two
    # in sync if the generator's model_config changes.
    model_config = ConfigDict(
        validate_assignment=True,
        validate_default=True,
        extra="forbid",
        arbitrary_types_allowed=True,
        use_enum_values=True,
        strict=False,
    )
class DynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
@ -295,13 +317,19 @@ class DynamicTableMixin(BaseModel):
model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover
raise ValidationError.from_exception_data(
title=f"field {key} cannot be cast to VectorData from {val}",
title="cast_extra_columns",
line_errors=[
{
"type": "ValueError",
"loc": ("DynamicTableMixin", "cast_extra_columns"),
"type": "value_error",
"input": val,
}
"loc": ("DynamicTableMixin", "cast_extra_columns"),
"ctx": {
"error": ValueError(
f"field {key} cannot be cast to {to_cast} from {val}"
)
},
},
*e.errors(),
],
) from e
return model
@ -358,24 +386,27 @@ class DynamicTableMixin(BaseModel):
return handler(val)
except ValidationError as e:
annotation = cls.model_fields[info.field_name].annotation
if type(annotation).__name__ == "_UnionGenericAlias":
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
# should pass if we're supposed to be a VectorData column
# don't want to override intention here by insisting that it is
# *actually* a VectorData column in case an NDArray has been specified for now
description = cls.model_fields[info.field_name].description
description = description if description is not None else ""
return handler(
annotation(
val,
name=info.field_name,
description=cls.model_fields[info.field_name].description,
description=description,
)
)
except Exception:
raise e from None
class VectorDataMixin(BaseModel, Generic[T]):
class VectorDataMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorData indexing abilities
"""
@ -426,7 +457,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
return len(self.value)
class VectorIndexMixin(BaseModel, Generic[T]):
class VectorIndexMixin(ConfiguredBaseModel, Generic[T]):
"""
Mixin class to give VectorIndex indexing abilities
"""
@ -518,7 +549,7 @@ class VectorIndexMixin(BaseModel, Generic[T]):
return len(self.value)
class DynamicTableRegionMixin(BaseModel):
class DynamicTableRegionMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing references to regions of dynamictables
"""
@ -574,7 +605,7 @@ class DynamicTableRegionMixin(BaseModel):
) # pragma: no cover
class AlignedDynamicTableMixin(BaseModel):
class AlignedDynamicTableMixin(ConfiguredBaseModel):
"""
Mixin to allow indexing multiple tables that are aligned on a common ID
@ -927,12 +958,18 @@ if "pytest" in sys.modules:
class VectorData(VectorDataMixin):
"""VectorData subclass for testing"""
pass
name: str = Field(...)
description: str = Field(
..., description="""Description of what these vectors represent."""
)
class VectorIndex(VectorIndexMixin):
"""VectorIndex subclass for testing"""
pass
name: str = Field(...)
description: str = Field(
..., description="""Description of what these vectors represent."""
)
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""DynamicTableRegion subclass for testing"""

View file

@ -166,8 +166,13 @@ def _load_node(
raise TypeError(f"Nodes can only be h5py Datasets and Groups, got {obj}")
if "neurodata_type" in obj.attrs:
# SPECIAL CASE: ignore `.specloc`
if ".specloc" in args:
del args[".specloc"]
model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"])
return model(**args)
else:
if "name" in args:
del args["name"]

View file

@ -131,7 +131,7 @@ def load_namespace_adapter(
else:
adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
adapter.populate_imports()
adapter.complete_namespaces()
return adapter

View file

@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import (
TypeDefinition,
)
from nwb_linkml.maps import flat_to_linkml
from nwb_linkml.maps import flat_to_linkml, linkml_reprs
def _make_dtypes() -> List[TypeDefinition]:
@ -36,8 +36,13 @@ def _make_dtypes() -> List[TypeDefinition]:
name=nwbtype,
minimum_value=amin,
typeof=linkmltype, # repr=repr_string
repr=linkml_reprs.get(nwbtype, None),
)
DTypeTypes.append(atype)
# a dict type!
DTypeTypes.append(TypeDefinition(name="dict", repr="dict"))
return DTypeTypes

View file

@ -2,7 +2,7 @@
Mapping from one domain to another
"""
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np, linkml_reprs
from nwb_linkml.maps.map import Map
from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP
@ -14,4 +14,5 @@ __all__ = [
"Map",
"flat_to_linkml",
"flat_to_np",
"linkml_reprs",
]

View file

@ -39,6 +39,12 @@ flat_to_linkml = {
Map between the flat data types and the simpler linkml base types
"""
# Maps an NWB flat dtype name to the python type expression emitted as the
# linkml ``repr`` for that type (see ``_make_dtypes``).
linkml_reprs = {"numeric": "float | int"}
"""
``repr`` fields used in the nwb language elements injected in every namespace
that give the nwb type a specific representation in the generated pydantic models
"""
flat_to_np = {
"float": float,
"float32": np.float32,
@ -66,6 +72,26 @@ flat_to_np = {
"isodatetime": np.datetime64,
}
# NWB flat dtype names grouped by broad kind, for classifying dtypes elsewhere
# in the mapping layer.

# signed and unsigned integer dtype names
integer_types = {
    "long",
    "int64",
    "int",
    "int32",
    "int16",
    "short",
    "int8",
    "uint",
    "uint32",
    "uint16",
    "uint8",
    "uint64",
}

# floating-point dtype names ("numeric" admits float | int, see linkml_reprs)
float_types = {"float", "float32", "double", "float64", "numeric"}

# textual dtype names
string_types = {"text", "utf", "utf8", "utf_8", "ascii"}
np_to_python = {
Any: Any,
np.number: float,

View file

@ -9,10 +9,16 @@ We will handle cardinality of array dimensions elsewhere
"""
QUANTITY_MAP = {
"*": {"required": False, "multivalued": True},
"*": {"required": None, "multivalued": True},
"+": {"required": True, "multivalued": True},
"?": {"required": False, "multivalued": False},
1: {"required": True, "multivalued": False},
"?": {"required": None, "multivalued": None},
1: {"required": True, "multivalued": None},
# include the NoneType for indexing
None: {"required": None, "multivalued": None},
}
"""
Map between NWB quantity values and linkml quantity metaslot values.
Use ``None`` for defaults (required: False, multivalued: False) rather than ``False``
to avoid adding unnecessary attributes
"""

View file

@ -85,7 +85,7 @@ def make_node(
def make_graph(namespaces: "NamespacesAdapter", recurse: bool = True) -> List[CytoElement]:
namespaces.populate_imports()
namespaces.complete_namespaces()
nodes = []
element: Namespace | Group | Dataset
print("walking graph")

View file

@ -127,7 +127,7 @@ class LinkMLProvider(Provider):
for schema_needs in adapter.needed_imports.values():
for needed in schema_needs:
adapter.imported.append(ns_adapters[needed])
adapter.populate_imports()
adapter.complete_namespaces()
# then do the build
res = {}

View file

@ -54,7 +54,7 @@ def test_walk_field_values(nwb_core_fixture):
text_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, "dtype", value="text"))
assert all([d.dtype == "text" for d in text_models])
# 135 known value from regex search
assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 135
assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 155
def test_build_result(linkml_schema_bare):

View file

@ -1,6 +1,9 @@
from pathlib import Path
import pytest
from nwb_linkml.adapters import SchemaAdapter
from nwb_linkml.adapters import NamespacesAdapter, SchemaAdapter
from nwb_schema_language import Attribute, Dataset, FlatDtype, Group, Namespace, Namespaces, Schema
@pytest.mark.parametrize(
@ -19,7 +22,7 @@ def test_find_type_source(nwb_core_fixture, class_name, schema_file, namespace_n
def test_populate_imports(nwb_core_fixture):
nwb_core_fixture.populate_imports()
nwb_core_fixture._populate_imports()
schema: SchemaAdapter
assert len(nwb_core_fixture.schemas) > 0
for schema in nwb_core_fixture.schemas:
@ -48,15 +51,109 @@ def test_skip_imports(nwb_core_fixture):
assert all([ns == "core" for ns in namespaces])
@pytest.mark.skip()
def test_populate_inheritance(nwb_core_fixture):
def test_roll_down_inheritance():
    """
    Classes should receive and override the properties of their parents
    when they have ``neurodata_type_inc``: building the child namespace
    rolls parent properties down into child classes, overriding only what
    the child re-declares.
    """
    pass  # NOTE(review): stray no-op left over from the previous test stub — removable
    # parent: a typed Group with two attributes and a named dataset carrying attrs
    parent_cls = Group(
        neurodata_type_def="Parent",
        doc="parent",
        attributes=[
            Attribute(name="a", dims=["a", "b"], shape=[1, 2], doc="a", value="a"),
            Attribute(name="b", dims=["c", "d"], shape=[3, 4], doc="b", value="b"),
        ],
        datasets=[
            Dataset(
                name="data",
                dims=["a", "b"],
                shape=[1, 2],
                doc="data",
                attributes=[
                    Attribute(name="c", dtype=FlatDtype.int32, doc="c"),
                    Attribute(name="d", dtype=FlatDtype.int32, doc="d"),
                ],
            )
        ],
    )
    parent_sch = Schema(source="parent.yaml")
    parent_ns = Namespaces(
        namespaces=[
            Namespace(
                author="hey",
                contact="sup",
                name="parent",
                doc="a parent",
                version="1",
                schema=[parent_sch],
            )
        ]
    )
    # child: overrides attr ``a``, overrides dataset attr ``c``, adds attr ``e``,
    # and contains an *untyped* child group that merely includes Parent
    child_cls = Group(
        neurodata_type_def="Child",
        neurodata_type_inc="Parent",
        doc="child",
        attributes=[Attribute(name="a", doc="a", value="z")],
        datasets=[
            Dataset(
                name="data",
                doc="data again",
                attributes=[Attribute(name="c", doc="c", value="z"), Attribute(name="e", doc="e")],
            ),
        ],
        groups=[Group(name="untyped_child", neurodata_type_inc="Parent", doc="untyped child")],
    )
    child_sch = Schema(source="child.yaml")
    child_ns = Namespaces(
        namespaces=[
            Namespace(
                author="hey",
                contact="sup",
                name="child",
                doc="a child",
                version="1",
                schema=[child_sch, Schema(namespace="parent")],
            )
        ]
    )
    parent_schema_adapter = SchemaAdapter(path=Path("parent.yaml"), groups=[parent_cls])
    parent_ns_adapter = NamespacesAdapter(namespaces=parent_ns, schemas=[parent_schema_adapter])
    child_schema_adapter = SchemaAdapter(path=Path("child.yaml"), groups=[child_cls])
    child_ns_adapter = NamespacesAdapter(
        namespaces=child_ns, schemas=[child_schema_adapter], imported=[parent_ns_adapter]
    )
    # complete_namespaces() performs the recursive roll-down under test
    child_ns_adapter.complete_namespaces()
    child = child_ns_adapter.get("Child")
    # overrides simple attrs
    assert child.doc == "child"
    # we don't receive attrs that aren't overridden in the child,
    # instead we let python/linkml inheritance handle that for us
    assert "b" not in [attr.name for attr in child.attributes]
    # overrides values while preserving remaining values when set
    attr_a = [attr for attr in child.attributes if attr.name == "a"][0]
    assert attr_a.value == "z"
    assert attr_a.dims == parent_cls.attributes[0].dims
    assert [attr.value for attr in child.attributes if attr.name == "a"][0] == "z"
    # preserve unset values in child datasets
    assert child.datasets[0].dtype == parent_cls.datasets[0].dtype
    assert child.datasets[0].dims == parent_cls.datasets[0].dims
    # we *do* get undeclared attrs in child datasets,
    # since those are not handled by python/linkml inheritance
    assert "d" in [attr.name for attr in child.datasets[0].attributes]
    # overrides set values in child datasets while preserving unset
    c_attr = [attr for attr in child.datasets[0].attributes if attr.name == "c"][0]
    assert c_attr.value == "z"
    assert c_attr.dtype == FlatDtype.int32
    # preserves new attrs
    assert "e" in [attr.name for attr in child.datasets[0].attributes]
    # neurodata_type_def is not included in untyped children
    assert child.groups[0].neurodata_type_def is None
    # we don't set any of the attrs from the parent class here because we don't override them,
    # so we don't need to merge them, and we don't want to clutter our linkml models unnecessarily
    assert child.groups[0].attributes is None

View file

@ -114,14 +114,14 @@ def _icephys_stimulus_and_response(
n_samples = generator.integers(20, 50)
stimulus = VoltageClampStimulusSeries(
name=f"vcss_{i}",
data=VoltageClampStimulusSeriesData(value=[i] * n_samples),
data=VoltageClampStimulusSeriesData(value=np.array([i] * n_samples, dtype=float)),
stimulus_description=f"{i}",
sweep_number=i,
electrode=electrode,
)
response = VoltageClampSeries(
name=f"vcs_{i}",
data=VoltageClampSeriesData(value=[i] * n_samples),
data=VoltageClampSeriesData(value=np.array([i] * n_samples, dtype=float)),
stimulus_description=f"{i}",
electrode=electrode,
)

View file

@ -149,8 +149,8 @@ def test_dynamictable_mixin_colnames_index():
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(10)),
"new_col_2": hdmf.VectorData(value=np.arange(10)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
"new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
# explicit index with mismatching name
cols["weirdname_index"] = VectorIndexMixin(value=np.arange(10), target=cols["new_col_1"])
@ -171,9 +171,9 @@ def test_dynamictable_mixin_colnames_ordered():
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(10)),
"new_col_2": hdmf.VectorData(value=np.arange(10)),
"new_col_3": hdmf.VectorData(value=np.arange(10)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
"new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
"new_col_3": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
order = ["new_col_2", "existing_col", "new_col_1", "new_col_3"]
@ -198,7 +198,7 @@ def test_dynamictable_mixin_getattr():
class MyDT(DynamicTableMixin):
existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
col = hdmf.VectorData(value=np.arange(10))
col = hdmf.VectorData(name="existing_col", description="", value=np.arange(10))
inst = MyDT(existing_col=col)
# regular lookup for attrs that exist
@ -257,13 +257,17 @@ def test_dynamictable_resolve_index():
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(10)),
"new_col_2": hdmf.VectorData(value=np.arange(10)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
"new_col_2": hdmf.VectorData(name="new_col_2", description="", value=np.arange(10)),
}
# explicit index with mismatching name
cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"])
cols["weirdname_index"] = hdmf.VectorIndex(
name="weirdname_index", description="", value=np.arange(10), target=cols["new_col_1"]
)
# implicit index with matching name
cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10))
cols["new_col_2_index"] = hdmf.VectorIndex(
name="new_col_2_index", description="", value=np.arange(10)
)
inst = MyDT(**cols)
assert inst.weirdname_index.target is inst.new_col_1
@ -282,14 +286,14 @@ def test_dynamictable_assert_equal_length():
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(11)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(11)),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
cols = {
"existing_col": np.arange(11),
"new_col_1": hdmf.VectorData(value=np.arange(10)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
@ -297,16 +301,20 @@ def test_dynamictable_assert_equal_length():
# wrong lengths are fine as long as the index is good
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(100)),
"new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 10) + 10),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)),
"new_col_1_index": hdmf.VectorIndex(
name="new_col_1_index", description="", value=np.arange(0, 100, 10) + 10
),
}
_ = MyDT(**cols)
# but not fine if the index is not good
cols = {
"existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(100)),
"new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(100)),
"new_col_1_index": hdmf.VectorIndex(
name="new_col_1_index", description="", value=np.arange(0, 100, 5) + 5
),
}
with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols)
@ -321,8 +329,8 @@ def test_dynamictable_setattr():
existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
cols = {
"existing_col": hdmf.VectorData(value=np.arange(10)),
"new_col_1": hdmf.VectorData(value=np.arange(10)),
"existing_col": hdmf.VectorData(name="existing_col", description="", value=np.arange(10)),
"new_col_1": hdmf.VectorData(name="new_col_1", description="", value=np.arange(10)),
}
inst = MyDT(existing_col=cols["existing_col"])
assert inst.colnames == ["existing_col"]
@ -335,7 +343,7 @@ def test_dynamictable_setattr():
# model validators should be called to ensure equal length
with pytest.raises(ValidationError):
inst.new_col_2 = hdmf.VectorData(value=np.arange(11))
inst.new_col_2 = hdmf.VectorData(name="new_col_2", description="", value=np.arange(11))
def test_vectordata_indexing():
@ -346,7 +354,7 @@ def test_vectordata_indexing():
value_array, index_array = _ragged_array(n_rows)
value_array = np.concatenate(value_array)
data = hdmf.VectorData(value=value_array)
data = hdmf.VectorData(name="data", description="", value=value_array)
# before we have an index, things should work as normal, indexing a 1D array
assert data[0] == 0
@ -356,7 +364,7 @@ def test_vectordata_indexing():
data[0] = 0
# indexes by themselves are the same
index_notarget = hdmf.VectorIndex(value=index_array)
index_notarget = hdmf.VectorIndex(name="no_target_index", description="", value=index_array)
assert index_notarget[0] == index_array[0]
assert all(index_notarget[0:3] == index_array[0:3])
oldval = index_array[0]
@ -364,7 +372,7 @@ def test_vectordata_indexing():
assert index_notarget[0] == 5
index_notarget[0] = oldval
index = hdmf.VectorIndex(value=index_array, target=data)
index = hdmf.VectorIndex(name="data_index", description="", value=index_array, target=data)
data._index = index
# after an index, both objects should index raggedly
@ -396,8 +404,10 @@ def test_vectordata_getattr():
"""
VectorData and VectorIndex both forward getattr to ``value``
"""
data = hdmf.VectorData(value=np.arange(100))
index = hdmf.VectorIndex(value=np.arange(10, 101, 10), target=data)
data = hdmf.VectorData(name="data", description="", value=np.arange(100))
index = hdmf.VectorIndex(
name="data_index", description="", value=np.arange(10, 101, 10), target=data
)
# get attrs that we defined on the models
# i.e. no attribute errors here
@ -447,7 +457,9 @@ def test_dynamictable_region_indexing(basic_table):
index = np.array([9, 4, 8, 3, 7, 2, 6, 1, 5, 0])
table_region = hdmf.DynamicTableRegion(value=index, table=inst)
table_region = hdmf.DynamicTableRegion(
name="table_region", description="", value=index, table=inst
)
row = table_region[1]
assert all(row.iloc[0] == index[1])
@ -499,10 +511,14 @@ def test_dynamictable_region_ragged():
timeseries_index=spike_idx,
)
region = hdmf.DynamicTableRegion(
name="region",
description="a table region what else would it be",
table=table,
value=value,
)
index = hdmf.VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx)
index = hdmf.VectorIndex(
name="region_index", description="hgggggggjjjj", target=region, value=idx
)
region._index = index
rows = region[1]
@ -594,8 +610,8 @@ def test_mixed_aligned_dynamictable(aligned_table):
value_array, index_array = _ragged_array(10)
value_array = np.concatenate(value_array)
data = hdmf.VectorData(value=value_array)
index = hdmf.VectorIndex(value=index_array)
data = hdmf.VectorData(name="data", description="", value=value_array)
index = hdmf.VectorIndex(name="data_index", description="", value=index_array)
atable = AlignedTable(**cols, extra_col=data, extra_col_index=index)
atable[0]

View file

@ -80,7 +80,7 @@ def test_position(read_nwbfile, read_pynwb):
py_trials = read_pynwb.trials.to_dataframe()
pd.testing.assert_frame_equal(py_trials, trials)
spatial = read_nwbfile.processing["behavior"].Position.SpatialSeries
spatial = read_nwbfile.processing["behavior"]["Position"]["SpatialSeries"]
py_spatial = read_pynwb.processing["behavior"]["Position"]["SpatialSeries"]
_compare_attrs(spatial, py_spatial)
assert np.array_equal(spatial[:], py_spatial.data[:])

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_0.hdmf_common_table import Container, Data, DynamicTable
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
    """Try to rescue instantiation by casting into the model's value field"""
    try:
        return handler(v)
    except Exception as e1:
        # Retry with the raw input packed into ``value`` (so ``Model(x)`` can
        # succeed as ``Model(value=x)``); if the retry also fails, surface the
        # original error rather than the retry's.
        try:
            return handler({"value": v})
        except Exception:
            raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
    """
    For classes that don't allow extra fields and have a value slot,
    pack those extra kwargs into ``value``
    """
    if (
        cls.model_config["extra"] == "forbid"
        and "value" in cls.model_fields
        and isinstance(v, dict)
    ):
        # kwargs that don't match any declared field
        extras = {key: val for key, val in v.items() if key not in cls.model_fields}
        if extras:
            for k in extras:
                del v[k]
            if "value" in v:
                # NOTE(review): assumes an explicitly-passed ``value`` is a dict;
                # a non-dict ``value`` plus extras raises AttributeError here —
                # confirm against the includes/base.py template.
                v["value"].update(extras)
            else:
                v["value"] = extras
    return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
    """Try to rescue instantiation by casting into the model's value field"""
    try:
        return handler(v)
    except Exception as e1:
        # Retry with the raw input packed into ``value`` (so ``Model(x)`` can
        # succeed as ``Model(value=x)``); if the retry also fails, surface the
        # original error rather than the retry's.
        try:
            return handler({"value": v})
        except Exception:
            raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
    """
    For classes that don't allow extra fields and have a value slot,
    pack those extra kwargs into ``value``
    """
    if (
        cls.model_config["extra"] == "forbid"
        and "value" in cls.model_fields
        and isinstance(v, dict)
    ):
        # kwargs that don't match any declared field
        extras = {key: val for key, val in v.items() if key not in cls.model_fields}
        if extras:
            for k in extras:
                del v[k]
            if "value" in v:
                # NOTE(review): assumes an explicitly-passed ``value`` is a dict;
                # a non-dict ``value`` plus extras raises AttributeError here —
                # confirm against the includes/base.py template.
                v["value"].update(extras)
            else:
                v["value"] = extras
    return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBContainer,
@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import (
@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_0.core_nwb_base import NWBData, NWBDataInterface
@ -31,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -65,6 +66,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -75,12 +88,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_0.core_nwb_base import (
Image,
@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -193,12 +205,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_2.hdmf_common_table import Container, Data, DynamicTable
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBContainer,
@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import (
@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_1.core_nwb_base import NWBData, NWBDataInterface
@ -31,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -41,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -65,6 +66,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -75,12 +88,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_1.core_nwb_base import (
Image,
@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -170,7 +170,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -183,6 +183,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -193,12 +205,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_2.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBContainer,
@ -40,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -50,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -61,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -74,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -84,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_2.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_2.core_nwb_base import (
@ -39,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -49,7 +50,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -60,7 +61,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -73,6 +74,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -83,12 +96,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import NWBDataInterface
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_2.core_nwb_base import (
Image,
@ -152,7 +152,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -162,7 +162,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -173,7 +173,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -186,6 +186,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -196,12 +208,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_4.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBContainer,
@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -85,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_4.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_4.core_nwb_base import (
@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import NWBDataInterface
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_4.core_nwb_base import (
Image,
@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -203,12 +215,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_5.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBContainer,
@ -41,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -51,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -62,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -75,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -85,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_5.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_2_5.core_nwb_base import (
@ -45,7 +46,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -55,7 +56,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -66,7 +67,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -79,6 +80,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -89,12 +102,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import NWBDataInterface
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -32,7 +32,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -43,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -56,6 +56,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -66,12 +78,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_2_5.core_nwb_base import (
Image,
@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -169,7 +169,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -180,7 +180,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -193,6 +193,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -203,12 +215,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable
@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -67,12 +79,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -138,9 +175,9 @@ class Image(NWBData):
description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
NDArray[Shape["* x, * y"], float | int],
NDArray[Shape["* x, * y, 3 r_g_b"], float | int],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int],
]
] = Field(None)
@ -305,13 +342,16 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
name: str = Field(...)
description: str = Field(
..., description="""Description of this collection of processed data."""
)
value: Optional[Dict[str, Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
},
)
name: str = Field(...)
class Images(NWBDataInterface):

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBDataInterface,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -176,6 +213,20 @@ class SpatialSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Optional[str] = Field(
"meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@ -183,8 +234,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
NDArray[Shape["* num_times"], float | int],
NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@ -198,10 +249,13 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"BehavioralEpochs",
json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEpochs)"}},
)
value: Optional[Dict[str, IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
class BehavioralEvents(NWBDataInterface):
@ -213,10 +267,13 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"BehavioralEvents",
json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralEvents)"}},
)
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
class BehavioralTimeSeries(NWBDataInterface):
@ -228,10 +285,13 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"BehavioralTimeSeries",
json_schema_extra={"linkml_meta": {"ifabsent": "string(BehavioralTimeSeries)"}},
)
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
class PupilTracking(NWBDataInterface):
@ -243,10 +303,12 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"PupilTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(PupilTracking)"}}
)
value: Optional[Dict[str, TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
class EyeTracking(NWBDataInterface):
@ -258,10 +320,12 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"EyeTracking", json_schema_extra={"linkml_meta": {"ifabsent": "string(EyeTracking)"}}
)
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
class CompassDirection(NWBDataInterface):
@ -273,10 +337,13 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"CompassDirection",
json_schema_extra={"linkml_meta": {"ifabsent": "string(CompassDirection)"}},
)
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
class Position(NWBDataInterface):
@ -288,10 +355,12 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
name: str = Field(
"Position", json_schema_extra={"linkml_meta": {"ifabsent": "string(Position)"}}
)
value: Optional[Dict[str, SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
# Model rebuild

View file

@ -8,7 +8,7 @@ from enum import Enum
from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import NWBContainer
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -42,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -55,6 +55,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -65,12 +77,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_3_0.core_nwb_base import (
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -156,11 +194,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
data: ElectricalSeriesData = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@ -173,11 +212,6 @@ class ElectricalSeries(TimeSeries):
}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@ -215,6 +249,45 @@ class ElectricalSeries(TimeSeries):
)
class ElectricalSeriesData(ConfiguredBaseModel):
"""
Recorded voltage data.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and 'channel_conversion' (if present).""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Optional[
Union[
NDArray[Shape["* num_times"], float | int],
NDArray[Shape["* num_times, * num_channels"], float | int],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float | int],
]
] = Field(None)
class SpikeEventSeries(ElectricalSeries):
"""
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
@ -225,10 +298,7 @@ class SpikeEventSeries(ElectricalSeries):
)
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
data: SpikeEventSeriesData = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
@ -238,6 +308,11 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
@ -250,11 +325,6 @@ class SpikeEventSeries(ElectricalSeries):
}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
description: Optional[str] = Field(
"no description",
description="""Description of the time series.""",
@ -287,6 +357,44 @@ class SpikeEventSeries(ElectricalSeries):
)
class SpikeEventSeriesData(ConfiguredBaseModel):
"""
Spike waveforms.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ecephys"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["volts"] = Field(
"volts",
description="""Unit of measurement for waveforms, which is fixed to 'volts'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Optional[
Union[
NDArray[Shape["* num_events, * num_samples"], float | int],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float | int],
]
] = Field(None)
class FeatureExtraction(NWBDataInterface):
"""
Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.
@ -385,10 +493,12 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
name: str = Field(
"EventWaveform", json_schema_extra={"linkml_meta": {"ifabsent": "string(EventWaveform)"}}
)
value: Optional[Dict[str, SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
class FilteredEphys(NWBDataInterface):
@ -400,10 +510,12 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
name: str = Field(
"FilteredEphys", json_schema_extra={"linkml_meta": {"ifabsent": "string(FilteredEphys)"}}
)
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
class LFP(NWBDataInterface):
@ -415,10 +527,10 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
name: str = Field("LFP", json_schema_extra={"linkml_meta": {"ifabsent": "string(LFP)"}})
value: Optional[Dict[str, ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
class ElectrodeGroup(NWBContainer):
@ -561,7 +673,9 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
ElectricalSeriesData.model_rebuild()
SpikeEventSeries.model_rebuild()
SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_3_0.core_nwb_base import TimeSeries
@ -36,7 +37,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +47,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +58,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +71,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +93,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBContainer,
@ -25,7 +25,12 @@ from ...core.v2_3_0.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_3_0.core_nwb_misc import Units
from ...core.v2_3_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_3_0.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, ElementIdentifiers, VectorData
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None"
@ -36,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -46,7 +51,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -57,7 +62,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -70,6 +75,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -80,12 +97,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -222,6 +264,9 @@ class NWBFile(NWBContainer):
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
specifications: Optional[dict] = Field(
None, description="""Nested dictionary of schema specifications"""
)
class NWBFileStimulus(ConfiguredBaseModel):
@ -320,10 +365,6 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""",
)
lab_meta_data: Optional[Dict[str, LabMetaData]] = Field(
None,
description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
)
devices: Optional[Dict[str, Device]] = Field(
None,
description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""",
@ -349,6 +390,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
description="""Metadata related to optophysiology.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImagingPlane"}]}},
)
value: Optional[Dict[str, LabMetaData]] = Field(
None,
description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""",
)
class GeneralSourceScript(ConfiguredBaseModel):
@ -384,12 +429,12 @@ class GeneralExtracellularEphys(ConfiguredBaseModel):
}
},
)
electrode_group: Optional[Dict[str, ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
value: Optional[Dict[str, ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
class ExtracellularEphysElectrodes(DynamicTable):
@ -545,12 +590,12 @@ class GeneralIntracellularEphys(ConfiguredBaseModel):
None,
description="""Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""",
)
intracellular_electrode: Optional[Dict[str, IntracellularElectrode]] = Field(
None, description="""An intracellular electrode."""
)
sweep_table: Optional[SweepTable] = Field(
None, description="""The table which groups different PatchClampSeries together."""
)
value: Optional[Dict[str, IntracellularElectrode]] = Field(
None, description="""An intracellular electrode."""
)
class NWBFileIntervals(ConfiguredBaseModel):
@ -576,7 +621,7 @@ class NWBFileIntervals(ConfiguredBaseModel):
invalid_times: Optional[TimeIntervals] = Field(
None, description="""Time intervals that should be removed from analysis."""
)
time_intervals: Optional[Dict[str, TimeIntervals]] = Field(
value: Optional[Dict[str, TimeIntervals]] = Field(
None,
description="""Optional additional table(s) for describing other experimental time intervals.""",
)

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_3_0.core_nwb_base import (
@ -42,7 +43,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -52,7 +53,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -63,7 +64,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -76,6 +77,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -86,12 +99,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -224,11 +262,25 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
value: Optional[NDArray[Shape["* num_times"], float]] = Field(
value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -243,12 +295,12 @@ class CurrentClampSeries(PatchClampSeries):
)
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: str = Field(
..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
@ -316,12 +368,28 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class IZeroClampSeries(CurrentClampSeries):
@ -476,6 +544,20 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
@ -483,7 +565,9 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
value: Any = Field(...)
value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class VoltageClampSeries(PatchClampSeries):
@ -496,13 +580,13 @@ class VoltageClampSeries(PatchClampSeries):
)
name: str = Field(...)
data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[VoltageClampSeriesCapacitanceFast] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[VoltageClampSeriesCapacitanceSlow] = Field(
None, description="""Slow capacitance, in farads."""
)
data: VoltageClampSeriesData = Field(..., description="""Recorded current.""")
resistance_comp_bandwidth: Optional[VoltageClampSeriesResistanceCompBandwidth] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -574,27 +658,6 @@ class VoltageClampSeries(PatchClampSeries):
)
class VoltageClampSeriesData(ConfiguredBaseModel):
"""
Recorded current.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
value: Any = Field(...)
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
"""
Fast capacitance, in farads.
@ -647,6 +710,43 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
value: float = Field(...)
class VoltageClampSeriesData(ConfiguredBaseModel):
"""
Recorded current.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.icephys"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["amperes"] = Field(
"amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={
"linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
},
)
value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
"""
Resistance compensation bandwidth, in hertz.
@ -851,12 +951,28 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["volts"] = Field(
"volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class IntracellularElectrode(NWBContainer):
@ -906,15 +1022,6 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
"linkml_meta": {
"array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
}
},
)
series: VectorData[NDArray[Any, PatchClampSeries]] = Field(
...,
description="""The PatchClampSeries with the sweep number in that row.""",
@ -936,6 +1043,15 @@ class SweepTable(DynamicTable):
}
},
)
sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
"linkml_meta": {
"array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
}
},
)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -958,9 +1074,9 @@ IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
from ...core.v2_3_0.core_nwb_device import Device
@ -23,7 +23,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -33,7 +33,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -44,7 +44,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -57,6 +57,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -67,12 +79,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -116,7 +153,7 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
value: Optional[NDArray[Shape["* x, * y"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
@ -138,7 +175,7 @@ class RGBImage(Image):
)
name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -168,7 +205,7 @@ class RGBAImage(Image):
)
name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float | int]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -198,12 +235,9 @@ class ImageSeries(TimeSeries):
)
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
data: Optional[ImageSeriesData] = Field(
None, description="""Binary data representing images across frames."""
)
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
@ -214,8 +248,9 @@ class ImageSeries(TimeSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
None,
"raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@ -263,6 +298,43 @@ class ImageSeries(TimeSeries):
)
class ImageSeriesData(ConfiguredBaseModel):
"""
Binary data representing images across frames.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
value: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], float | int],
NDArray[Shape["* frame, * x, * y, * z"], float | int],
]
] = Field(None)
class ImageSeriesExternalFile(ConfiguredBaseModel):
"""
Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
@ -304,12 +376,9 @@ class ImageMaskSeries(ImageSeries):
}
},
)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
data: Optional[ImageSeriesData] = Field(
None, description="""Binary data representing images across frames."""
)
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
@ -320,8 +389,9 @@ class ImageMaskSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
None,
"raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@ -379,6 +449,9 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
data: OpticalSeriesData = Field(
..., description="""Images presented to subject, either grayscale or RGB"""
)
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
@ -387,10 +460,6 @@ class OpticalSeries(ImageSeries):
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@ -405,8 +474,9 @@ class OpticalSeries(ImageSeries):
description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""",
)
format: Optional[str] = Field(
None,
"raw",
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(raw)"}},
)
device: Optional[Union[Device, str]] = Field(
None,
@ -454,6 +524,43 @@ class OpticalSeries(ImageSeries):
)
class OpticalSeriesData(ConfiguredBaseModel):
"""
Images presented to subject, either grayscale or RGB
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
value: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], float | int],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float | int],
]
] = Field(None)
class IndexSeries(TimeSeries):
"""
Stores indices to image frames stored in an ImageSeries. The purpose of the ImageIndexSeries is to allow a static image stack to be stored somewhere, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced ImageSeries, and the timestamps array indicates when that image was displayed.
@ -464,10 +571,8 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
data: IndexSeriesData = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
indexed_timeseries: Union[ImageSeries, str] = Field(
...,
@ -515,13 +620,50 @@ class IndexSeries(TimeSeries):
)
class IndexSeriesData(ConfiguredBaseModel):
"""
Index of the frame in the referenced ImageSeries.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.image"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: str = Field(
...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
value: Optional[NDArray[Shape["* num_times"], int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
ImageSeriesData.model_rebuild()
ImageSeriesExternalFile.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
IndexSeriesData.model_rebuild()

View file

@ -17,6 +17,7 @@ from pydantic import (
RootModel,
ValidationInfo,
field_validator,
model_validator,
)
from ...core.v2_3_0.core_nwb_base import TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@ -38,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -48,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -59,7 +60,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -72,6 +73,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -82,12 +95,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -213,6 +251,20 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Optional[str] = Field(
"see ",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
@ -220,8 +272,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
value: Optional[
Union[
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
NDArray[Shape["* num_times"], float | int],
NDArray[Shape["* num_times, * num_features"], float | int],
]
] = Field(None)
@ -236,10 +288,8 @@ class AnnotationSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], str] = Field(
...,
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
data: AnnotationSeriesData = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(
"no description",
@ -278,6 +328,43 @@ class AnnotationSeries(TimeSeries):
)
class AnnotationSeriesData(ConfiguredBaseModel):
"""
Annotations made during an experiment.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: float = Field(
-1.0,
description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
le=-1,
ge=-1,
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["n/a"] = Field(
"n/a",
description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
)
value: Optional[NDArray[Shape["* num_times"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class IntervalSeries(TimeSeries):
"""
Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
@ -288,10 +375,8 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
data: IntervalSeriesData = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(
"no description",
@ -330,6 +415,43 @@ class IntervalSeries(TimeSeries):
)
class IntervalSeriesData(ConfiguredBaseModel):
"""
Use values >0 if interval started, <0 if interval ended.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: float = Field(
-1.0,
description="""Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0.""",
le=-1,
ge=-1,
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: Literal["n/a"] = Field(
"n/a",
description="""Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'.""",
json_schema_extra={"linkml_meta": {"equals_string": "n/a", "ifabsent": "string(n/a)"}},
)
value: Optional[NDArray[Shape["* num_times"], int]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
class DecompositionSeries(TimeSeries):
"""
Spectral analysis of a time series, e.g. of an LFP or a speech signal.
@ -417,24 +539,40 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
conversion: Optional[float] = Field(
1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
resolution: Optional[float] = Field(
-1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
unit: str = Field(
"no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "num_times"},
{"alias": "num_channels"},
{"alias": "num_bands"},
]
value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float | int]] = (
Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "num_times"},
{"alias": "num_channels"},
{"alias": "num_bands"},
]
}
}
}
},
},
)
)
@ -504,9 +642,18 @@ class Units(DynamicTable):
)
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
spike_times_index: Optional[Named[VectorIndex]] = Field(
electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
description="""Index into the spike_times dataset.""",
description="""Electrode group that each spike unit came from.""",
json_schema_extra={
"linkml_meta": {
"array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
}
},
)
electrodes: Optional[Named[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@ -516,12 +663,9 @@ class Units(DynamicTable):
}
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[Named[VectorIndex]] = Field(
electrodes_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
description="""Index into electrodes.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@ -547,9 +691,9 @@ class Units(DynamicTable):
},
)
)
electrodes_index: Optional[Named[VectorIndex]] = Field(
obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
description="""Index into the obs_intervals dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@ -559,9 +703,12 @@ class Units(DynamicTable):
}
},
)
electrodes: Optional[Named[DynamicTableRegion]] = Field(
spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit."""
)
spike_times_index: Optional[Named[VectorIndex]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
description="""Index into the spike_times dataset.""",
json_schema_extra={
"linkml_meta": {
"annotations": {
@ -571,41 +718,15 @@ class Units(DynamicTable):
}
},
)
electrode_group: Optional[VectorData[NDArray[Any, ElectrodeGroup]]] = Field(
None,
description="""Electrode group that each spike unit came from.""",
json_schema_extra={
"linkml_meta": {
"array": {"maximum_number_dimensions": False, "minimum_number_dimensions": 1}
}
},
waveform_mean: Optional[UnitsWaveformMean] = Field(
None, description="""Spike waveform mean for each spike unit."""
)
waveform_mean: Optional[
VectorData[
Union[
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
VectorData[
Union[
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[VectorData[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
json_schema_extra={
"linkml_meta": {
"array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
}
},
)
waveform_sd: Optional[UnitsWaveformSd] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
waveforms: Optional[UnitsWaveforms] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
waveforms_index: Optional[Named[VectorIndex]] = Field(
None,
@ -671,14 +792,109 @@ class UnitsSpikeTimes(VectorData):
] = Field(None)
# Auto-generated pydantic model for the fixed-name ``waveform_mean`` column of the
# ``Units`` table (see ``Units.waveform_mean`` above). Generated from the
# ``core.nwb.misc`` linkml schema — edit the generator/schema, not this class.
class UnitsWaveformMean(VectorData):
    """
    Spike waveform mean for each spike unit.
    """

    # Provenance metadata recording which linkml schema produced this class.
    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})

    # Dataset name is fixed to "waveform_mean" (enforced via equals_string).
    name: Literal["waveform_mean"] = Field(
        "waveform_mean",
        json_schema_extra={
            "linkml_meta": {"equals_string": "waveform_mean", "ifabsent": "string(waveform_mean)"}
        },
    )
    # Sampling rate of the waveform samples, in hertz (optional).
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    # Unit of measurement is fixed to 'volts' (enforced via equals_string).
    unit: Optional[Literal["volts"]] = Field(
        "volts",
        description="""Unit of measurement. This value is fixed to 'volts'.""",
        json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
    )
    description: str = Field(..., description="""Description of what these vectors represent.""")
    # The column data itself: an array of any dtype with 1 to 4 anonymous dimensions.
    value: Optional[
        Union[
            NDArray[Shape["* dim0"], Any],
            NDArray[Shape["* dim0, * dim1"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
        ]
    ] = Field(None)
# Auto-generated pydantic model for the fixed-name ``waveform_sd`` column of the
# ``Units`` table (see ``Units.waveform_sd`` above). Generated from the
# ``core.nwb.misc`` linkml schema — edit the generator/schema, not this class.
class UnitsWaveformSd(VectorData):
    """
    Spike waveform standard deviation for each spike unit.
    """

    # Provenance metadata recording which linkml schema produced this class.
    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})

    # Dataset name is fixed to "waveform_sd" (enforced via equals_string).
    name: Literal["waveform_sd"] = Field(
        "waveform_sd",
        json_schema_extra={
            "linkml_meta": {"equals_string": "waveform_sd", "ifabsent": "string(waveform_sd)"}
        },
    )
    # Sampling rate of the waveform samples, in hertz (optional).
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    # Unit of measurement is fixed to 'volts' (enforced via equals_string).
    unit: Optional[Literal["volts"]] = Field(
        "volts",
        description="""Unit of measurement. This value is fixed to 'volts'.""",
        json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
    )
    description: str = Field(..., description="""Description of what these vectors represent.""")
    # The column data itself: an array of any dtype with 1 to 4 anonymous dimensions.
    value: Optional[
        Union[
            NDArray[Shape["* dim0"], Any],
            NDArray[Shape["* dim0, * dim1"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
        ]
    ] = Field(None)
# Auto-generated pydantic model for the fixed-name ``waveforms`` column of the
# ``Units`` table (see ``Units.waveforms`` above), a doubly-indexed column via
# ``waveforms_index`` / ``waveforms_index_index``. Generated from the
# ``core.nwb.misc`` linkml schema — edit the generator/schema, not this class.
class UnitsWaveforms(VectorData):
    """
    Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.
    """

    # Provenance metadata recording which linkml schema produced this class.
    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.misc"})

    # Dataset name is fixed to "waveforms" (enforced via equals_string).
    name: Literal["waveforms"] = Field(
        "waveforms",
        json_schema_extra={
            "linkml_meta": {"equals_string": "waveforms", "ifabsent": "string(waveforms)"}
        },
    )
    # Sampling rate of the waveform samples, in hertz (optional).
    sampling_rate: Optional[float] = Field(None, description="""Sampling rate, in hertz.""")
    # Unit of measurement is fixed to 'volts' (enforced via equals_string).
    unit: Optional[Literal["volts"]] = Field(
        "volts",
        description="""Unit of measurement. This value is fixed to 'volts'.""",
        json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
    )
    description: str = Field(..., description="""Description of what these vectors represent.""")
    # The column data itself: an array of any dtype with 1 to 4 anonymous dimensions.
    value: Optional[
        Union[
            NDArray[Shape["* dim0"], Any],
            NDArray[Shape["* dim0, * dim1"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
        ]
    ] = Field(None)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
# Rebuild every model defined in this module now that all of them exist,
# so string/forward annotations between them can be resolved.
for _model in (
    AbstractFeatureSeries,
    AbstractFeatureSeriesData,
    AnnotationSeries,
    AnnotationSeriesData,
    IntervalSeries,
    IntervalSeriesData,
    DecompositionSeries,
    DecompositionSeriesData,
    DecompositionSeriesBands,
    Units,
    UnitsSpikeTimes,
    UnitsWaveformMean,
    UnitsWaveformSd,
    UnitsWaveforms,
):
    _model.model_rebuild()
del _model

View file

@ -9,7 +9,7 @@ from typing import Any, ClassVar, Dict, List, Literal, Optional, Union
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...core.v2_3_0.core_nwb_base import (
NWBContainer,
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="allow",
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, val: Union[int, slice]) -> Any:
def __getitem__(self, val: Union[int, slice, str]) -> Any:
"""Try and get a value from value or "data" if we have it"""
if hasattr(self, "value") and self.value is not None:
return self.value[val]
@ -49,7 +49,7 @@ class ConfiguredBaseModel(BaseModel):
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
def coerce_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
@ -62,6 +62,18 @@ class ConfiguredBaseModel(BaseModel):
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="wrap")
@classmethod
def cast_with_value(cls, v: Any, handler, info) -> Any:
"""Try to rescue instantiation by casting into the model's value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler({"value": v})
except Exception:
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
@ -72,12 +84,37 @@ class ConfiguredBaseModel(BaseModel):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
if v.__pydantic_extra__:
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
else:
v = annotation(**v.__dict__)
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
@model_validator(mode="before")
@classmethod
def gather_extra_to_value(cls, v: Any) -> Any:
"""
For classes that don't allow extra fields and have a value slot,
pack those extra kwargs into ``value``
"""
if (
cls.model_config["extra"] == "forbid"
and "value" in cls.model_fields
and isinstance(v, dict)
):
extras = {key: val for key, val in v.items() if key not in cls.model_fields}
if extras:
for k in extras:
del v[k]
if "value" in v:
v["value"].update(extras)
else:
v["value"] = extras
return v
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@ -121,10 +158,8 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
data: OptogeneticSeriesData = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
site: Union[OptogeneticStimulusSite, str] = Field(
...,
@ -172,6 +207,41 @@ class OptogeneticSeries(TimeSeries):
)
# Auto-generated pydantic model for the ``data`` dataset of ``OptogeneticSeries``
# (see ``OptogeneticSeries.data`` above). Generated from the ``core.nwb.ogen``
# linkml schema — edit the generator/schema, not this class.
class OptogeneticSeriesData(ConfiguredBaseModel):
    """
    Applied power for optogenetic stimulus, in watts.
    """

    # Provenance metadata recording which linkml schema produced this class.
    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ogen"})

    # Dataset name is fixed to "data" (enforced via equals_string).
    name: Literal["data"] = Field(
        "data",
        json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
    )
    # Free-text continuity descriptor; the description suggests "continuous",
    # "instantaneous", or "step", but the type does not restrict the value.
    continuity: Optional[str] = Field(
        None,
        description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
    )
    # Multiplier mapping stored values to the declared unit; defaults to 1.0.
    conversion: Optional[float] = Field(
        1.0,
        description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
        json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
    )
    # Smallest meaningful difference between values; -1.0 means unknown.
    resolution: Optional[float] = Field(
        -1.0,
        description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
        json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
    )
    # Unit of measurement is fixed to 'watts' (enforced via equals_string).
    unit: Literal["watts"] = Field(
        "watts",
        description="""Unit of measurement for data, which is fixed to 'watts'.""",
        json_schema_extra={"linkml_meta": {"equals_string": "watts", "ifabsent": "string(watts)"}},
    )
    # The dataset itself: a 1-D numeric array over the num_times dimension.
    value: Optional[NDArray[Shape["* num_times"], float | int]] = Field(
        None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
    )
class OptogeneticStimulusSite(NWBContainer):
"""
A site of optogenetic stimulation.
@ -202,4 +272,5 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
OptogeneticSeriesData.model_rebuild()
OptogeneticStimulusSite.model_rebuild()

Some files were not shown because too many files have changed in this diff Show more